Compare commits


1 Commit

Author: dgtlmoon | SHA1: b749d9a923 | Message: Re #1231 | Date: 2022-12-14 17:51:49 +01:00
116 changed files with 1101 additions and 5082 deletions

View File

@@ -50,6 +50,7 @@ jobs:
python -m pip install --upgrade pip
pip install flake8 pytest
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
- name: Create release metadata
run: |
@@ -98,8 +99,6 @@ jobs:
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
# Looks like this was disabled
# provenance: false
# A new tagged release is required, which builds :tag and :latest
- name: Build and push :tag
@@ -118,8 +117,6 @@ jobs:
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
# Looks like this was disabled
# provenance: false
- name: Image digest
run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}

View File

@@ -19,6 +19,12 @@ jobs:
with:
python-version: 3.9
# - name: Install dependencies
# run: |
# python -m pip install --upgrade pip
# pip install flake8 pytest
# if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
# if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
- name: Test that pip builds without error
run: |

View File

@@ -10,13 +10,11 @@ on:
paths:
- requirements.txt
- Dockerfile
- .github/workflows/*
pull_request:
paths:
- requirements.txt
- Dockerfile
- .github/workflows/*
# Changes to requirements.txt packages and Dockerfile may or may not always be compatible with arm etc, so worth testing
# @todo: some kind of path filter for requirements.txt and Dockerfile

View File

@@ -8,73 +8,32 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
# Mainly just for link/flake8
- name: Set up Python 3.10
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: '3.10'
python-version: 3.9
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install flake8 pytest
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
- name: Lint with flake8
run: |
pip3 install flake8
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Spin up ancillary testable services
- name: Unit tests
run: |
docker network create changedet-network
python3 -m unittest changedetectionio.tests.unit.test_notification_diff
# Selenium+browserless
docker run --network changedet-network -d --hostname selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome-debug:3.141.59
docker run --network changedet-network -d --hostname browserless -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.53-chrome-stable
- name: Build changedetection.io container for testing
run: |
# Build a changedetection.io container and start testing inside
docker build . -t test-changedetectionio
- name: Test built container with pytest
- name: Test with pytest
run: |
# Unit tests
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_notification_diff'
# All tests
docker run --network changedet-network test-changedetectionio bash -c 'cd changedetectionio && ./run_basic_tests.sh'
# Each test is totally isolated and performs its own cleanup/reset
cd changedetectionio; ./run_all_tests.sh
- name: Test built container selenium+browserless/playwright
run: |
# Selenium fetch
docker run --rm -e "WEBDRIVER_URL=http://selenium:4444/wd/hub" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py'
# Playwright/Browserless fetch
docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py'
# restock detection via playwright - added name=changedet here so that playwright/browserless can connect to it
docker run --rm --name "changedet" -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-port=5004 --live-server-host=0.0.0.0 tests/restock/test_restock.py'
- name: Test proxy interaction
run: |
cd changedetectionio
./run_proxy_tests.sh
cd ..
- name: Test changedetection.io container starts+runs basically without error
run: |
docker run -p 5556:5000 -d test-changedetectionio
sleep 3
# Should return 0 (no error) when grep finds it
curl -s http://localhost:5556 |grep -q checkbox-uuid
# and IPv6
curl -s -g -6 "http://[::1]:5556"|grep -q checkbox-uuid
#export WEBDRIVER_URL=http://localhost:4444/wd/hub
#pytest tests/fetchers/test_content.py
#pytest tests/test_errorhandling.py

View File

@@ -7,3 +7,9 @@ Otherwise, it's always best to PR into the `dev` branch.
Please be sure that all new functionality has a matching test!
Use `pytest` to validate/test, you can run the existing tests as `pytest tests/test_notification.py` for example
```
pip3 install -r requirements-dev
```
this is from https://github.com/dgtlmoon/changedetection.io/blob/master/requirements-dev.txt
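The same single-module run can also be invoked programmatically rather than from the shell; a minimal sketch, assuming pytest from requirements-dev.txt is installed and the working directory is the inner changedetectionio/ package where tests/ lives:
```python
# Minimal sketch: run one test module programmatically with pytest
import sys
import pytest

# Equivalent to `pytest tests/test_notification.py` on the command line
sys.exit(pytest.main(["tests/test_notification.py"]))
```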

View File

@@ -1,5 +1,5 @@
# pip dependencies install stage
FROM python:3.10-slim as builder
FROM python:3.8-slim as builder
# See `cryptography` pin comment in requirements.txt
ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1
@@ -29,16 +29,21 @@ RUN pip install --target=/dependencies playwright~=1.27.1 \
|| echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."
# Final image stage
FROM python:3.10-slim
FROM python:3.8-slim
# See `cryptography` pin comment in requirements.txt
ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1
# Re #93, #73, excluding rustc (adds another 430Mb~)
RUN apt-get update && apt-get install -y --no-install-recommends \
libssl1.1 \
libxslt1.1 \
# For pdftohtml
poppler-utils \
zlib1g \
&& apt-get clean && rm -rf /var/lib/apt/lists/*
g++ \
gcc \
libc-dev \
libffi-dev \
libjpeg-dev \
libssl-dev \
libxslt-dev \
zlib1g-dev
# https://stackoverflow.com/questions/58701233/docker-logs-erroneously-appears-empty-until-container-stops
ENV PYTHONUNBUFFERED=1

View File

@@ -1,7 +1,6 @@
recursive-include changedetectionio/api *
recursive-include changedetectionio/blueprint *
recursive-include changedetectionio/model *
recursive-include changedetectionio/processors *
recursive-include changedetectionio/res *
recursive-include changedetectionio/static *
recursive-include changedetectionio/templates *

View File

@@ -1,8 +1,6 @@
## Web Site Change Detection, Restock monitoring and notifications.
## Web Site Change Detection, Monitoring and Notification.
**_Detect website content changes and perform meaningful actions - trigger notifications via Discord, Email, Slack, Telegram, API calls and many more._**
_Live your data-life pro-actively._
_Live your data-life pro-actively, Detect website changes and perform meaningful actions, trigger notifications via Discord, Email, Slack, Telegram, API calls and many more._
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start?src=github)
@@ -11,7 +9,7 @@ _Live your data-life pro-actively._
![changedetection.io](https://github.com/dgtlmoon/changedetection.io/actions/workflows/test-only.yml/badge.svg?branch=master)
[**Don't have time? Let us host it for you! try our $8.99/month subscription - use our proxies and support!**](https://lemonade.changedetection.io/start) , _half the price of other website change monitoring services and comes with unlimited watches & checks!_
[**Don't have time? Let us host it for you! try our $6.99/month subscription - use our proxies and support!**](https://lemonade.changedetection.io/start) , _half the price of other website change monitoring services and comes with unlimited watches & checks!_
- Chrome browser included.
- Super fast, no registration needed setup.
@@ -45,11 +43,9 @@ Requires Playwright to be enabled.
- Products and services have a change in pricing
- _Out of stock notification_ and _Back In stock notification_
- Monitor and track PDF file changes, know when a PDF file has text changes.
- Governmental department updates (changes are often only on their websites)
- New software releases, security advisories when you're not on their mailing list.
- Festivals with changes
- Discogs restock alerts and monitoring
- Realestate listing changes
- Know when your favourite whiskey is on sale, or other special deals are announced before anyone else
- COVID related news from government websites
@@ -64,7 +60,6 @@ Requires Playwright to be enabled.
- You have a very sensitive list of URLs to watch and you do _not_ want to use the paid alternatives. (Remember, _you_ are the product)
- Get notified when certain keywords appear in Twitter search results
- Proactively search for jobs, get notified when companies update their careers page, search job portals for keywords.
- Get alerts when new job positions are open on Bamboo HR and other job platforms
_Need an actual Chrome runner with Javascript support? We support fetching via WebDriver and Playwright!</a>_
@@ -73,7 +68,6 @@ _Need an actual Chrome runner with Javascript support? We support fetching via W
- Lots of trigger filters, such as "Trigger on text", "Remove text by selector", "Ignore text", "Extract text", also using regular-expressions!
- Target elements with xPath and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
- Switch between fast non-JS and Chrome JS based "fetchers"
- Track changes in PDF files (Monitor text changed in the PDF, Also monitor PDF filesize and checksums)
- Easily specify how often a site should be checked
- Execute JS before extracting text (Good for logging in, see examples in the UI!)
- Override Request Headers, Specify `POST` or `GET` and other methods
@@ -102,8 +96,6 @@ $ docker run -d --restart always -p "127.0.0.1:5000:5000" -v datastore-volume:/d
`:latest` tag is our latest stable release, `:dev` tag is our bleeding edge `master` branch.
Alternative docker repository over at ghcr - [ghcr.io/dgtlmoon/changedetection.io](https://ghcr.io/dgtlmoon/changedetection.io)
### Windows
See the install instructions at the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Microsoft-Windows
@@ -227,9 +219,6 @@ See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configura
Raspberry Pi and linux/arm/v6 linux/arm/v7 arm64 devices are supported! See the wiki for [details](https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver)
## API Support
Supports managing the website watch list [via our API](https://changedetection.io/docs/api_v1/index.html)
## Support us
@@ -254,5 +243,5 @@ I offer commercial support, this software is depended on by network security, ae
[test-shield]: https://github.com/dgtlmoon/changedetection.io/actions/workflows/test-only.yml/badge.svg?branch=master
[license-shield]: https://img.shields.io/github/license/dgtlmoon/changedetection.io.svg?style=for-the-badge
[release-link]: https://github.com/dgtlmoon/changedetection.io/releases
[release-link]: https://github.com/dgtlmoon.com/changedetection.io/releases
[docker-link]: https://hub.docker.com/r/dgtlmoon/changedetection.io

View File

@@ -7,7 +7,7 @@
from changedetectionio import changedetection
import multiprocessing
import sys
import signal
import os
def sigchld_handler(_signo, _stack_frame):
@@ -35,9 +35,6 @@ if __name__ == '__main__':
try:
while True:
time.sleep(1)
if not parse_process.is_alive():
# Process died/crashed for some reason, exit with error set
sys.exit(1)
except KeyboardInterrupt:
#parse_process.terminate() not needed, because this process will issue it to the sub-process anyway
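A self-contained sketch of the supervision pattern restored above, with a dummy worker standing in for the real changedetection entry point (the worker and its sleep are illustrative only):
```python
# Sketch: the parent polls the child process and exits non-zero if it dies unexpectedly
import multiprocessing
import sys
import time

def worker():
    time.sleep(60)  # stand-in for the real changedetection main loop

if __name__ == '__main__':
    parse_process = multiprocessing.Process(target=worker)
    parse_process.start()
    try:
        while True:
            time.sleep(1)
            if not parse_process.is_alive():
                # Process died/crashed for some reason, exit with error set
                sys.exit(1)
    except KeyboardInterrupt:
        parse_process.terminate()
```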

View File

@@ -1,15 +1,5 @@
#!/usr/bin/python3
from changedetectionio import queuedWatchMetaData
from copy import deepcopy
from distutils.util import strtobool
from feedgen.feed import FeedGenerator
from flask_compress import Compress as FlaskCompress
from flask_login import current_user
from flask_restful import abort, Api
from flask_wtf import CSRFProtect
from functools import wraps
from threading import Event
import datetime
import flask_login
import logging
@@ -20,6 +10,12 @@ import threading
import time
import timeago
from changedetectionio import queuedWatchMetaData
from copy import deepcopy
from distutils.util import strtobool
from feedgen.feed import FeedGenerator
from threading import Event
from flask import (
Flask,
abort,
@@ -32,11 +28,15 @@ from flask import (
session,
url_for,
)
from flask_compress import Compress as FlaskCompress
from flask_login import login_required
from flask_restful import abort, Api
from flask_wtf import CSRFProtect
from changedetectionio import html_tools
from changedetectionio.api import api_v1
__version__ = '0.41.1'
__version__ = '0.40.0.2'
datastore = None
@@ -53,6 +53,7 @@ app = Flask(__name__,
static_url_path="",
static_folder="static",
template_folder="templates")
from flask_compress import Compress
# Super handy for compressing large BrowserSteps responses and others
FlaskCompress(app)
@@ -64,8 +65,7 @@ app.config.exit = Event()
app.config['NEW_VERSION_AVAILABLE'] = False
if os.getenv('FLASK_SERVER_NAME'):
app.config['SERVER_NAME'] = os.getenv('FLASK_SERVER_NAME')
app.config['LOGIN_DISABLED'] = False
#app.config["EXPLAIN_TEMPLATE_LOADING"] = True
@@ -74,6 +74,7 @@ app.config['TEMPLATES_AUTO_RELOAD'] = True
app.jinja_env.add_extension('jinja2.ext.loopcontrols')
csrf = CSRFProtect()
csrf.init_app(app)
notification_debug_log=[]
watch_api = Api(app, decorators=[csrf.exempt])
@@ -148,6 +149,7 @@ class User(flask_login.UserMixin):
# Compare given password against JSON store or Env var
def check_password(self, password):
import base64
import hashlib
@@ -155,9 +157,11 @@ class User(flask_login.UserMixin):
raw_salt_pass = os.getenv("SALTED_PASS", False)
if not raw_salt_pass:
raw_salt_pass = datastore.data['settings']['application'].get('password')
raw_salt_pass = datastore.data['settings']['application']['password']
raw_salt_pass = base64.b64decode(raw_salt_pass)
salt_from_storage = raw_salt_pass[:32] # 32 is the length of the salt
# Use the exact same setup you used to generate the key, but this time put in the password to check
@@ -167,44 +171,21 @@ class User(flask_login.UserMixin):
salt_from_storage,
100000
)
new_key = salt_from_storage + new_key
new_key = salt_from_storage + new_key
return new_key == raw_salt_pass
pass
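The salt handling in check_password above follows the standard PBKDF2 verify pattern; a minimal standalone sketch, taking the 32-byte salt prefix and 100000 iterations from the hunks above and assuming SHA-256 as the digest:
```python
# Sketch: verify a password against a stored base64(salt + derived_key) value
import base64
import hashlib

def check_password_sketch(stored_b64, password):
    raw_salt_pass = base64.b64decode(stored_b64)
    salt_from_storage = raw_salt_pass[:32]  # 32 is the length of the salt

    # Re-derive with the same parameters used when the password was set
    new_key = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'), salt_from_storage, 100000)
    return salt_from_storage + new_key == raw_salt_pass
```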
def login_optionally_required(func):
@wraps(func)
def decorated_view(*args, **kwargs):
has_password_enabled = datastore.data['settings']['application'].get('password') or os.getenv("SALTED_PASS", False)
# Permitted
if request.endpoint == 'static_content' and request.view_args['group'] == 'styles':
return func(*args, **kwargs)
# Permitted
elif request.endpoint == 'diff_history_page' and datastore.data['settings']['application'].get('shared_diff_access'):
return func(*args, **kwargs)
elif request.method in flask_login.config.EXEMPT_METHODS:
return func(*args, **kwargs)
elif app.config.get('LOGIN_DISABLED'):
return func(*args, **kwargs)
elif has_password_enabled and not current_user.is_authenticated:
return app.login_manager.unauthorized()
return func(*args, **kwargs)
return decorated_view
def changedetection_app(config=None, datastore_o=None):
global datastore
datastore = datastore_o
# so far just for read-only via tests, but this will be moved eventually to be the main source
# (instead of the global var)
app.config['DATASTORE'] = datastore_o
app.config['DATASTORE']=datastore_o
#app.config.update(config or {})
login_manager = flask_login.LoginManager(app)
login_manager.login_view = 'login'
@@ -232,8 +213,6 @@ def changedetection_app(config=None, datastore_o=None):
# https://flask-cors.readthedocs.io/en/latest/
# CORS(app)
@login_manager.user_loader
def user_loader(email):
user = User()
@@ -242,7 +221,7 @@ def changedetection_app(config=None, datastore_o=None):
@login_manager.unauthorized_handler
def unauthorized_handler():
flash("You must be logged in, please log in.", 'error')
# @todo validate its a URL of this host and use that
return redirect(url_for('login', next=url_for('index')))
@app.route('/logout')
@@ -255,6 +234,10 @@ def changedetection_app(config=None, datastore_o=None):
@app.route('/login', methods=['GET', 'POST'])
def login():
if not datastore.data['settings']['application']['password'] and not os.getenv("SALTED_PASS", False):
flash("Login not required, no password enabled.", "notice")
return redirect(url_for('index'))
if request.method == 'GET':
if flask_login.current_user.is_authenticated:
flash("Already logged in")
@@ -289,22 +272,27 @@ def changedetection_app(config=None, datastore_o=None):
return redirect(url_for('login'))
@app.before_request
def before_request_handle_cookie_x_settings():
def do_something_whenever_a_request_comes_in():
# Disable password login if there is not one set
# (No password in settings or env var)
app.config['LOGIN_DISABLED'] = datastore.data['settings']['application']['password'] == False and os.getenv("SALTED_PASS", False) == False
# Set the auth cookie path if we're running as X-settings/X-Forwarded-Prefix
if os.getenv('USE_X_SETTINGS') and 'X-Forwarded-Prefix' in request.headers:
app.config['REMEMBER_COOKIE_PATH'] = request.headers['X-Forwarded-Prefix']
app.config['SESSION_COOKIE_PATH'] = request.headers['X-Forwarded-Prefix']
return None
# For the RSS path, allow access via a token
if request.path == '/rss' and request.args.get('token'):
app_rss_token = datastore.data['settings']['application']['rss_access_token']
rss_url_token = request.args.get('token')
if app_rss_token == rss_url_token:
app.config['LOGIN_DISABLED'] = True
@app.route("/rss", methods=['GET'])
@login_required
def rss():
# Always requires token set
app_rss_token = datastore.data['settings']['application'].get('rss_access_token')
rss_url_token = request.args.get('token')
if rss_url_token != app_rss_token:
return "Access denied, bad token", 403
from . import diff
limit_tag = request.args.get('tag')
@@ -340,6 +328,8 @@ def changedetection_app(config=None, datastore_o=None):
if len(dates) < 2:
continue
prev_fname = watch.history[dates[-2]]
if not watch.viewed:
# Re #239 - GUID needs to be individual for each event
# @todo In the future make this a configurable link back (see work on BASE_URL https://github.com/dgtlmoon/changedetection.io/pull/228)
@@ -360,12 +350,9 @@ def changedetection_app(config=None, datastore_o=None):
watch_title = watch.get('title') if watch.get('title') else watch.get('url')
fe.title(title=watch_title)
latest_fname = watch.history[dates[-1]]
html_diff = diff.render_diff(previous_version_file_contents=watch.get_history_snapshot(dates[-2]),
newest_version_file_contents=watch.get_history_snapshot(dates[-1]),
include_equal=False,
line_feed_sep="<br>")
html_diff = diff.render_diff(prev_fname, latest_fname, include_equal=False, line_feed_sep="</br>")
fe.content(content="<html><body><h4>{}</h4>{}</body></html>".format(watch_title, html_diff),
type='CDATA')
@@ -379,7 +366,7 @@ def changedetection_app(config=None, datastore_o=None):
return response
@app.route("/", methods=['GET'])
@login_optionally_required
@login_required
def index():
from changedetectionio import forms
@@ -392,9 +379,9 @@ def changedetection_app(config=None, datastore_o=None):
if op:
uuid = request.args.get('uuid')
if op == 'pause':
datastore.data['watching'][uuid].toggle_pause()
datastore.data['watching'][uuid]['paused'] ^= True
elif op == 'mute':
datastore.data['watching'][uuid].toggle_mute()
datastore.data['watching'][uuid]['notification_muted'] ^= True
datastore.needs_write = True
return redirect(url_for('index', tag = limit_tag))
@@ -419,21 +406,17 @@ def changedetection_app(config=None, datastore_o=None):
existing_tags = datastore.get_all_tags()
form = forms.quickWatchForm(request.form)
output = render_template(
"watch-overview.html",
# Don't link to hosting when we're on the hosting environment
output = render_template("watch-overview.html",
form=form,
watches=sorted_watches,
tags=existing_tags,
active_tag=limit_tag,
app_rss_token=datastore.data['settings']['application']['rss_access_token'],
form=form,
guid=datastore.data['app_guid'],
has_proxies=datastore.proxy_list,
has_unviewed=datastore.has_unviewed,
# Don't link to hosting when we're on the hosting environment
hosted_sticky=os.getenv("SALTED_PASS", False) == False,
queued_uuids=[q_uuid.item['uuid'] for q_uuid in update_q.queue],
system_default_fetcher=datastore.data['settings']['application'].get('fetch_backend'),
tags=existing_tags,
watches=sorted_watches
)
guid=datastore.data['app_guid'],
queued_uuids=[q_uuid.item['uuid'] for q_uuid in update_q.queue])
if session.get('share-link'):
@@ -443,7 +426,7 @@ def changedetection_app(config=None, datastore_o=None):
# AJAX endpoint for sending a test
@app.route("/notification/send-test", methods=['POST'])
@login_optionally_required
@login_required
def ajax_callback_send_notification_test():
import apprise
@@ -476,7 +459,7 @@ def changedetection_app(config=None, datastore_o=None):
@app.route("/clear_history/<string:uuid>", methods=['GET'])
@login_optionally_required
@login_required
def clear_watch_history(uuid):
try:
datastore.clear_watch_history(uuid)
@@ -488,7 +471,7 @@ def changedetection_app(config=None, datastore_o=None):
return redirect(url_for('index'))
@app.route("/clear_history", methods=['GET', 'POST'])
@login_optionally_required
@login_required
def clear_all_history():
if request.method == 'POST':
@@ -509,15 +492,49 @@ def changedetection_app(config=None, datastore_o=None):
output = render_template("clear_all_history.html")
return output
# If they edited an existing watch, we need to know to reset the current/previous md5 to include
# the excluded text.
def get_current_checksum_include_ignore_text(uuid):
import hashlib
from changedetectionio import fetch_site_status
# Get the most recent one
newest_history_key = datastore.data['watching'][uuid].get('newest_history_key')
# 0 means that theres only one, so that there should be no 'unviewed' history available
if newest_history_key == 0:
newest_history_key = list(datastore.data['watching'][uuid].history.keys())[0]
if newest_history_key:
with open(datastore.data['watching'][uuid].history[newest_history_key],
encoding='utf-8') as file:
raw_content = file.read()
handler = fetch_site_status.perform_site_check(datastore=datastore)
stripped_content = html_tools.strip_ignore_text(raw_content,
datastore.data['watching'][uuid]['ignore_text'])
if datastore.data['settings']['application'].get('ignore_whitespace', False):
checksum = hashlib.md5(stripped_content.translate(None, b'\r\n\t ')).hexdigest()
else:
checksum = hashlib.md5(stripped_content).hexdigest()
return checksum
return datastore.data['watching'][uuid]['previous_md5']
@app.route("/edit/<string:uuid>", methods=['GET', 'POST'])
@login_optionally_required
@login_required
# https://stackoverflow.com/questions/42984453/wtforms-populate-form-with-data-if-data-exists
# https://wtforms.readthedocs.io/en/3.0.x/forms/#wtforms.form.Form.populate_obj ?
def edit_page(uuid):
from . import forms
from .blueprint.browser_steps.browser_steps import browser_step_ui_config
from . import processors
from changedetectionio import forms
from changedetectionio.blueprint.browser_steps.browser_steps import browser_step_ui_config
using_default_check_time = True
# More for testing, possible to return the first/only
@@ -532,15 +549,6 @@ def changedetection_app(config=None, datastore_o=None):
flash("No watch with the UUID %s found." % (uuid), "error")
return redirect(url_for('index'))
switch_processor = request.args.get('switch_processor')
if switch_processor:
for p in processors.available_processors():
if p[0] == switch_processor:
datastore.data['watching'][uuid]['processor'] = switch_processor
flash(f"Switched to mode - {p[1]}.")
datastore.clear_watch_history(uuid)
redirect(url_for('edit_page', uuid=uuid))
# be sure we update with a copy instead of accidently editing the live object by reference
default = deepcopy(datastore.data['watching'][uuid])
@@ -560,8 +568,6 @@ def changedetection_app(config=None, datastore_o=None):
data=default,
)
form.fetch_backend.choices.append(("system", 'System settings default'))
# form.browser_steps[0] can be assumed that we 'goto url' first
if datastore.proxy_list is None:
@@ -574,7 +580,6 @@ def changedetection_app(config=None, datastore_o=None):
if request.method == 'POST' and form.validate():
extra_update_obj = {}
if request.args.get('unpause_on_save'):
@@ -591,6 +596,10 @@ def changedetection_app(config=None, datastore_o=None):
using_default_check_time = False
break
# Use the default if it's the same as system-wide.
if form.fetch_backend.data == datastore.data['settings']['application']['fetch_backend']:
extra_update_obj['fetch_backend'] = None
# Ignore text
@@ -601,16 +610,6 @@ def changedetection_app(config=None, datastore_o=None):
if datastore.proxy_list is not None and form.data['proxy'] == '':
extra_update_obj['proxy'] = None
# Unsetting all filter_text methods should make it go back to default
# This particularly affects tests running
if 'filter_text_added' in form.data and not form.data.get('filter_text_added') \
and 'filter_text_replaced' in form.data and not form.data.get('filter_text_replaced') \
and 'filter_text_removed' in form.data and not form.data.get('filter_text_removed'):
extra_update_obj['filter_text_added'] = True
extra_update_obj['filter_text_replaced'] = True
extra_update_obj['filter_text_removed'] = True
datastore.data['watching'][uuid].update(form.data)
datastore.data['watching'][uuid].update(extra_update_obj)
@@ -638,6 +637,8 @@ def changedetection_app(config=None, datastore_o=None):
visualselector_data_is_ready = datastore.visualselector_data_is_ready(uuid)
# Only works reliably with Playwright
visualselector_enabled = os.getenv('PLAYWRIGHT_DRIVER_URL', False) and default['fetch_backend'] == 'html_webdriver'
# JQ is difficult to install on windows and must be manually added (outside requirements.txt)
jq_support = True
@@ -648,16 +649,10 @@ def changedetection_app(config=None, datastore_o=None):
watch = datastore.data['watching'].get(uuid)
system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'
is_html_webdriver = False
if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver':
is_html_webdriver = True
# Only works reliably with Playwright
visualselector_enabled = os.getenv('PLAYWRIGHT_DRIVER_URL', False) and is_html_webdriver
is_html_webdriver = True if watch.get('fetch_backend') == 'html_webdriver' or (
watch.get('fetch_backend', None) is None and system_uses_webdriver) else False
output = render_template("edit.html",
available_processors=processors.available_processors(),
browser_steps_config=browser_step_ui_config,
current_base_url=datastore.data['settings']['application']['base_url'],
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
@@ -677,7 +672,7 @@ def changedetection_app(config=None, datastore_o=None):
return output
@app.route("/settings", methods=['GET', "POST"])
@login_optionally_required
@login_required
def settings_page():
from changedetectionio import content_fetcher, forms
@@ -757,11 +752,9 @@ def changedetection_app(config=None, datastore_o=None):
return output
@app.route("/import", methods=['GET', "POST"])
@login_optionally_required
@login_required
def import_page():
remaining_urls = []
from . import forms
if request.method == 'POST':
from .importer import import_url_list, import_distill_io_json
@@ -769,7 +762,7 @@ def changedetection_app(config=None, datastore_o=None):
if request.values.get('urls') and len(request.values.get('urls').strip()):
# Import and push into the queue for immediate update check
importer = import_url_list()
importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore, processor=request.values.get('processor'))
importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore)
for uuid in importer.new_uuids:
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
@@ -787,12 +780,9 @@ def changedetection_app(config=None, datastore_o=None):
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
form = forms.importForm(formdata=request.form if request.method == 'POST' else None,
# data=default,
)
# Could be some remaining, or we could be on GET
output = render_template("import.html",
form=form,
import_url_list_remaining="\n".join(remaining_urls),
original_distill_json=''
)
@@ -800,7 +790,7 @@ def changedetection_app(config=None, datastore_o=None):
# Clear all statuses, so we do not see the 'unviewed' class
@app.route("/form/mark-all-viewed", methods=['GET'])
@login_optionally_required
@login_required
def mark_all_viewed():
# Save the current newest history as the most recently viewed
@@ -810,7 +800,7 @@ def changedetection_app(config=None, datastore_o=None):
return redirect(url_for('index'))
@app.route("/diff/<string:uuid>", methods=['GET', 'POST'])
@login_optionally_required
@login_required
def diff_history_page(uuid):
from changedetectionio import forms
@@ -858,35 +848,36 @@ def changedetection_app(config=None, datastore_o=None):
# Save the current newest history as the most recently viewed
datastore.set_last_viewed(uuid, time.time())
newest_file = history[dates[-1]]
# Read as binary and force decode as UTF-8
# Windows may fail decode in python if we just use 'r' mode (chardet decode exception)
try:
newest_version_file_contents = watch.get_history_snapshot(dates[-1])
with open(newest_file, 'r', encoding='utf-8', errors='ignore') as f:
newest_version_file_contents = f.read()
except Exception as e:
newest_version_file_contents = "Unable to read {}.\n".format(dates[-1])
newest_version_file_contents = "Unable to read {}.\n".format(newest_file)
previous_version = request.args.get('previous_version')
previous_timestamp = dates[-2]
if previous_version:
previous_timestamp = previous_version
try:
previous_file = history[previous_version]
except KeyError:
# Not present, use a default value, the second one in the sorted list.
previous_file = history[dates[-2]]
try:
previous_version_file_contents = watch.get_history_snapshot(previous_timestamp)
with open(previous_file, 'r', encoding='utf-8', errors='ignore') as f:
previous_version_file_contents = f.read()
except Exception as e:
previous_version_file_contents = "Unable to read {}.\n".format(previous_timestamp)
previous_version_file_contents = "Unable to read {}.\n".format(previous_file)
screenshot_url = watch.get_screenshot()
system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'
is_html_webdriver = False
if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver':
is_html_webdriver = True
password_enabled_and_share_is_off = False
if datastore.data['settings']['application'].get('password') or os.getenv("SALTED_PASS", False):
password_enabled_and_share_is_off = not datastore.data['settings']['application'].get('shared_diff_access')
is_html_webdriver = True if watch.get('fetch_backend') == 'html_webdriver' or (
watch.get('fetch_backend', None) is None and system_uses_webdriver) else False
output = render_template("diff.html",
current_diff_url=watch['url'],
@@ -901,7 +892,6 @@ def changedetection_app(config=None, datastore_o=None):
left_sticky=True,
newest=newest_version_file_contents,
newest_version_timestamp=dates[-1],
password_enabled_and_share_is_off=password_enabled_and_share_is_off,
previous=previous_version_file_contents,
screenshot=screenshot_url,
uuid=uuid,
@@ -912,7 +902,7 @@ def changedetection_app(config=None, datastore_o=None):
return output
@app.route("/preview/<string:uuid>", methods=['GET'])
@login_optionally_required
@login_required
def preview_page(uuid):
content = []
ignored_line_numbers = []
@@ -932,9 +922,8 @@ def changedetection_app(config=None, datastore_o=None):
extra_stylesheets = [url_for('static_content', group='styles', filename='diff.css')]
is_html_webdriver = False
if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver':
is_html_webdriver = True
is_html_webdriver = True if watch.get('fetch_backend') == 'html_webdriver' or (
watch.get('fetch_backend', None) is None and system_uses_webdriver) else False
# Never requested successfully, but we detected a fetch error
if datastore.data['watching'][uuid].history_n == 0 and (watch.get_error_text() or watch.get_error_snapshot()):
@@ -953,35 +942,37 @@ def changedetection_app(config=None, datastore_o=None):
return output
timestamp = list(watch.history.keys())[-1]
filename = watch.history[timestamp]
try:
tmp = watch.get_history_snapshot(timestamp).splitlines()
with open(filename, 'r', encoding='utf-8', errors='ignore') as f:
tmp = f.readlines()
# Get what needs to be highlighted
ignore_rules = watch.get('ignore_text', []) + datastore.data['settings']['application']['global_ignore_text']
# Get what needs to be highlighted
ignore_rules = watch.get('ignore_text', []) + datastore.data['settings']['application']['global_ignore_text']
# .readlines will keep the \n, but we will parse it here again, in the future tidy this up
ignored_line_numbers = html_tools.strip_ignore_text(content="\n".join(tmp),
wordlist=ignore_rules,
mode='line numbers'
)
# .readlines will keep the \n, but we will parse it here again, in the future tidy this up
ignored_line_numbers = html_tools.strip_ignore_text(content="".join(tmp),
wordlist=ignore_rules,
mode='line numbers'
)
trigger_line_numbers = html_tools.strip_ignore_text(content="\n".join(tmp),
wordlist=watch['trigger_text'],
mode='line numbers'
)
# Prepare the classes and lines used in the template
i=0
for l in tmp:
classes=[]
i+=1
if i in ignored_line_numbers:
classes.append('ignored')
if i in trigger_line_numbers:
classes.append('triggered')
content.append({'line': l, 'classes': ' '.join(classes)})
trigger_line_numbers = html_tools.strip_ignore_text(content="".join(tmp),
wordlist=watch['trigger_text'],
mode='line numbers'
)
# Prepare the classes and lines used in the template
i=0
for l in tmp:
classes=[]
i+=1
if i in ignored_line_numbers:
classes.append('ignored')
if i in trigger_line_numbers:
classes.append('triggered')
content.append({'line': l, 'classes': ' '.join(classes)})
except Exception as e:
content.append({'line': f"File doesnt exist or unable to read timestamp {timestamp}", 'classes': ''})
content.append({'line': "File doesnt exist or unable to read file {}".format(filename), 'classes': ''})
output = render_template("preview.html",
content=content,
@@ -1001,7 +992,7 @@ def changedetection_app(config=None, datastore_o=None):
return output
@app.route("/settings/notification-logs", methods=['GET'])
@login_optionally_required
@login_required
def notification_logs():
global notification_debug_log
output = render_template("notification-log.html",
@@ -1011,7 +1002,7 @@ def changedetection_app(config=None, datastore_o=None):
# We're good but backups are even better!
@app.route("/backup", methods=['GET'])
@login_optionally_required
@login_required
def get_backup():
import zipfile
@@ -1023,8 +1014,7 @@ def changedetection_app(config=None, datastore_o=None):
os.unlink(previous_backup_filename)
# create a ZipFile object
timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
backupname = "changedetection-backup-{}.zip".format(timestamp)
backupname = "changedetection-backup-{}.zip".format(int(time.time()))
backup_filepath = os.path.join(datastore_o.datastore_path, backupname)
with zipfile.ZipFile(backup_filepath, "w",
@@ -1132,14 +1122,13 @@ def changedetection_app(config=None, datastore_o=None):
abort(404)
@app.route("/form/add/quickwatch", methods=['POST'])
@login_optionally_required
@login_required
def form_quick_watch_add():
from changedetectionio import forms
form = forms.quickWatchForm(request.form)
if not form.validate():
for widget, l in form.errors.items():
flash(','.join(l), 'error')
flash("Error")
return redirect(url_for('index'))
url = request.form.get('url').strip()
@@ -1148,24 +1137,24 @@ def changedetection_app(config=None, datastore_o=None):
return redirect(url_for('index'))
add_paused = request.form.get('edit_and_watch_submit_button') != None
processor = request.form.get('processor', 'text_json_diff')
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip(), extras={'paused': add_paused, 'processor': processor})
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip(), extras={'paused': add_paused})
if new_uuid:
if add_paused:
flash('Watch added in Paused state, saving will unpause.')
return redirect(url_for('edit_page', uuid=new_uuid, unpause_on_save=1))
else:
# Straight into the queue.
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid}))
flash("Watch added.")
if not add_paused and new_uuid:
# Straight into the queue.
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid}))
flash("Watch added.")
if add_paused:
flash('Watch added in Paused state, saving will unpause.')
return redirect(url_for('edit_page', uuid=new_uuid, unpause_on_save=1))
return redirect(url_for('index'))
@app.route("/api/delete", methods=['GET'])
@login_optionally_required
@login_required
def form_delete():
uuid = request.args.get('uuid')
@@ -1182,7 +1171,7 @@ def changedetection_app(config=None, datastore_o=None):
return redirect(url_for('index'))
@app.route("/api/clone", methods=['GET'])
@login_optionally_required
@login_required
def form_clone():
uuid = request.args.get('uuid')
# More for testing, possible to return the first/only
@@ -1190,15 +1179,13 @@ def changedetection_app(config=None, datastore_o=None):
uuid = list(datastore.data['watching'].keys()).pop()
new_uuid = datastore.clone(uuid)
if new_uuid:
if not datastore.data['watching'].get(uuid).get('paused'):
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=5, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
flash('Cloned.')
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=5, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
flash('Cloned.')
return redirect(url_for('index'))
@app.route("/api/checknow", methods=['GET'])
@login_optionally_required
@login_required
def form_watch_checknow():
# Forced recheck will skip the 'skip if content is the same' rule (, 'reprocess_existing_data': True})))
tag = request.args.get('tag')
@@ -1228,11 +1215,11 @@ def changedetection_app(config=None, datastore_o=None):
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False}))
i += 1
flash("{} watches queued for rechecking.".format(i))
flash("{} watches are queued for rechecking.".format(i))
return redirect(url_for('index', tag=tag))
@app.route("/form/checkbox-operations", methods=['POST'])
@login_optionally_required
@login_required
def form_watch_list_checkbox_operations():
op = request.form['op']
uuids = request.form.getlist('uuids')
@@ -1249,6 +1236,7 @@ def changedetection_app(config=None, datastore_o=None):
uuid = uuid.strip()
if datastore.data['watching'].get(uuid):
datastore.data['watching'][uuid.strip()]['paused'] = True
flash("{} watches paused".format(len(uuids)))
elif (op == 'unpause'):
@@ -1272,14 +1260,6 @@ def changedetection_app(config=None, datastore_o=None):
datastore.data['watching'][uuid.strip()]['notification_muted'] = False
flash("{} watches un-muted".format(len(uuids)))
elif (op == 'recheck'):
for uuid in uuids:
uuid = uuid.strip()
if datastore.data['watching'].get(uuid):
# Recheck and require a full reprocessing
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
flash("{} watches queued for rechecking".format(len(uuids)))
elif (op == 'notification-default'):
from changedetectionio.notification import (
default_notification_format_for_watch
@@ -1296,7 +1276,7 @@ def changedetection_app(config=None, datastore_o=None):
return redirect(url_for('index'))
@app.route("/api/share-url", methods=['GET'])
@login_optionally_required
@login_required
def form_share_put_watch():
"""Given a watch UUID, upload the info and return a share-link
the share-link can be imported/added"""
@@ -1461,11 +1441,7 @@ def ticker_thread_check_time_launch_checks():
watch_uuid_list = []
while True:
try:
# Get a list of watches sorted by last_checked, [1] because it gets passed a tuple
# This is so we examine the most over-due first
for k in sorted(datastore.data['watching'].items(), key=lambda item: item[1].get('last_checked',0)):
watch_uuid_list.append(k[0])
watch_uuid_list = datastore.data['watching'].keys()
except RuntimeError as e:
# RuntimeError: dictionary changed size during iteration
time.sleep(0.1)

View File

@@ -1,117 +0,0 @@
# Responsible for building the storage dict into a set of rules ("JSON Schema") acceptable via the API
# Probably other ways to solve this when the backend switches to some ORM
def build_time_between_check_json_schema():
# Setup time between check schema
schema_properties_time_between_check = {
"type": "object",
"additionalProperties": False,
"properties": {}
}
for p in ['weeks', 'days', 'hours', 'minutes', 'seconds']:
schema_properties_time_between_check['properties'][p] = {
"anyOf": [
{
"type": "integer"
},
{
"type": "null"
}
]
}
return schema_properties_time_between_check
def build_watch_json_schema(d):
# Base JSON schema
schema = {
'type': 'object',
'properties': {},
}
for k, v in d.items():
# @todo 'integer' is not covered here because its almost always for internal usage
if isinstance(v, type(None)):
schema['properties'][k] = {
"anyOf": [
{"type": "null"},
]
}
elif isinstance(v, list):
schema['properties'][k] = {
"anyOf": [
{"type": "array",
# Always is an array of strings, like text or regex or something
"items": {
"type": "string",
"maxLength": 5000
}
},
]
}
elif isinstance(v, bool):
schema['properties'][k] = {
"anyOf": [
{"type": "boolean"},
]
}
elif isinstance(v, str):
schema['properties'][k] = {
"anyOf": [
{"type": "string",
"maxLength": 5000},
]
}
# Can also be a string (or None by default above)
for v in ['body',
'notification_body',
'notification_format',
'notification_title',
'proxy',
'tag',
'title',
'webdriver_js_execute_code'
]:
schema['properties'][v]['anyOf'].append({'type': 'string', "maxLength": 5000})
# None or Boolean
schema['properties']['track_ldjson_price_data']['anyOf'].append({'type': 'boolean'})
schema['properties']['method'] = {"type": "string",
"enum": ["GET", "POST", "DELETE", "PUT"]
}
schema['properties']['fetch_backend']['anyOf'].append({"type": "string",
"enum": ["html_requests", "html_webdriver"]
})
# All headers must be key/value type dict
schema['properties']['headers'] = {
"type": "object",
"patternProperties": {
# Should always be a string:string type value
".*": {"type": "string"},
}
}
from changedetectionio.notification import valid_notification_formats
schema['properties']['notification_format'] = {'type': 'string',
'enum': list(valid_notification_formats.keys())
}
# Stuff that shouldn't be available but is just state-storage
for v in ['previous_md5', 'last_error', 'has_ldjson_price_data', 'previous_md5_before_filters', 'uuid']:
del schema['properties'][v]
schema['properties']['webdriver_delay']['anyOf'].append({'type': 'integer'})
schema['properties']['time_between_check'] = build_time_between_check_json_schema()
# headers ?
return schema
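A schema built this way can be exercised directly with the jsonschema package; a hedged sketch against a hand-rolled subset of the property style shown above (the sample properties are illustrative, not the full Watch model):
```python
# Sketch: validate a minimal watch-like dict (requires the `jsonschema` package)
from jsonschema import ValidationError, validate

schema = {
    "type": "object",
    "properties": {
        "url": {"anyOf": [{"type": "string", "maxLength": 5000}]},
        "paused": {"anyOf": [{"type": "boolean"}]},
        "method": {"type": "string", "enum": ["GET", "POST", "DELETE", "PUT"]},
    },
}

try:
    validate(instance={"url": "https://example.com", "method": "GET"}, schema=schema)
    print("valid")
except ValidationError as e:
    print("invalid:", e.message)
```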

View File

@@ -1,24 +1,12 @@
from flask_expects_json import expects_json
from changedetectionio import queuedWatchMetaData
from flask_restful import abort, Resource
from flask import request, make_response
import validators
from . import auth
import copy
# See docs/README.md for rebuilding the docs/apidoc information
from . import api_schema
# Build a JSON Schema atleast partially based on our Watch model
from changedetectionio.model.Watch import base_config as watch_base_config
schema = api_schema.build_watch_json_schema(watch_base_config)
schema_create_watch = copy.deepcopy(schema)
schema_create_watch['required'] = ['url']
schema_update_watch = copy.deepcopy(schema)
schema_update_watch['additionalProperties'] = False
# https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
class Watch(Resource):
def __init__(self, **kwargs):
@@ -28,26 +16,9 @@ class Watch(Resource):
# Get information about a single watch, excluding the history list (can be large)
# curl http://localhost:4000/api/v1/watch/<string:uuid>
# @todo - version2 - ?muted and ?paused should be able to be called together, return the watch struct not "OK"
# ?recheck=true
@auth.check_token
def get(self, uuid):
"""
@api {get} /api/v1/watch/:uuid Get a single watch data
@apiDescription Retrieve watch information and set muted/paused status
@apiExample {curl} Example usage:
curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -H"x-api-key:813031b16330fe25e3780cf0325daa45"
curl "http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091?muted=unmuted" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
curl "http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091?paused=unpaused" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
@apiName Watch
@apiGroup Watch
@apiParam {uuid} uuid Watch unique ID.
@apiQuery {Boolean} [recheck] Recheck this watch `recheck=1`
@apiQuery {String} [paused] =`paused` or =`unpaused` , Sets the PAUSED state
@apiQuery {String} [muted] =`muted` or =`unmuted` , Sets the MUTE NOTIFICATIONS state
@apiSuccess (200) {String} OK When paused/muted/recheck operation OR full JSON object of the watch
@apiSuccess (200) {JSON} WatchJSON JSON Full JSON object of the watch
"""
from copy import deepcopy
watch = deepcopy(self.datastore.data['watching'].get(uuid))
if not watch:
@@ -56,72 +27,19 @@ class Watch(Resource):
if request.args.get('recheck'):
self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
return "OK", 200
if request.args.get('paused', '') == 'paused':
self.datastore.data['watching'].get(uuid).pause()
return "OK", 200
elif request.args.get('paused', '') == 'unpaused':
self.datastore.data['watching'].get(uuid).unpause()
return "OK", 200
if request.args.get('muted', '') == 'muted':
self.datastore.data['watching'].get(uuid).mute()
return "OK", 200
elif request.args.get('muted', '') == 'unmuted':
self.datastore.data['watching'].get(uuid).unmute()
return "OK", 200
# Return without history, get that via another API call
# Properties are not returned as a JSON, so add the required props manually
watch['history_n'] = watch.history_n
watch['last_changed'] = watch.last_changed
return watch
@auth.check_token
def delete(self, uuid):
"""
@api {delete} /api/v1/watch/:uuid Delete a watch and related history
@apiExample {curl} Example usage:
curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X DELETE -H"x-api-key:813031b16330fe25e3780cf0325daa45"
@apiParam {uuid} uuid Watch unique ID.
@apiName Delete
@apiGroup Watch
@apiSuccess (200) {String} OK Was deleted
"""
if not self.datastore.data['watching'].get(uuid):
abort(400, message='No watch exists with the UUID of {}'.format(uuid))
self.datastore.delete(uuid)
return 'OK', 204
@auth.check_token
@expects_json(schema_update_watch)
def put(self, uuid):
"""
@api {put} /api/v1/watch/:uuid Update watch information
@apiExample {curl} Example usage:
Update (PUT)
curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X PUT -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"url": "https://my-nice.com" , "tag": "new list"}'
@apiDescription Updates an existing watch using JSON, accepts the same structure as returned in <a href="#api-Watch-Watch">get single watch information</a>
@apiParam {uuid} uuid Watch unique ID.
@apiName Update a watch
@apiGroup Watch
@apiSuccess (200) {String} OK Was updated
@apiSuccess (500) {String} ERR Some other error
"""
watch = self.datastore.data['watching'].get(uuid)
if not watch:
abort(404, message='No watch exists with the UUID of {}'.format(uuid))
if request.json.get('proxy'):
plist = self.datastore.proxy_list
if not request.json.get('proxy') in plist:
return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400
watch.update(request.json)
return "OK", 200
class WatchHistory(Resource):
def __init__(self, **kwargs):
@@ -131,21 +49,6 @@ class WatchHistory(Resource):
# Get a list of available history for a watch by UUID
# curl http://localhost:4000/api/v1/watch/<string:uuid>/history
def get(self, uuid):
"""
@api {get} /api/v1/watch/<string:uuid>/history Get a list of all historical snapshots available for a watch
@apiDescription Requires `uuid`, returns list
@apiExample {curl} Example usage:
curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json"
{
"1676649279": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/cb7e9be8258368262246910e6a2a4c30.txt",
"1677092785": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/e20db368d6fc633e34f559ff67bb4044.txt",
"1677103794": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/02efdd37dacdae96554a8cc85dc9c945.txt"
}
@apiName Get list of available stored snapshots for watch
@apiGroup Watch History
@apiSuccess (200) {String} OK
@apiSuccess (404) {String} ERR Not found
"""
watch = self.datastore.data['watching'].get(uuid)
if not watch:
abort(404, message='No watch exists with the UUID of {}'.format(uuid))
@@ -157,18 +60,11 @@ class WatchSingleHistory(Resource):
# datastore is a black box dependency
self.datastore = kwargs['datastore']
# Read a given history snapshot and return its content
# <string:timestamp> or "latest"
# curl http://localhost:4000/api/v1/watch/<string:uuid>/history/<int:timestamp>
@auth.check_token
def get(self, uuid, timestamp):
"""
@api {get} /api/v1/watch/<string:uuid>/history/<int:timestamp> Get single snapshot from watch
@apiDescription Requires watch `uuid` and `timestamp`. `timestamp` of "`latest`" for latest available snapshot, or <a href="#api-Watch_History-Get_list_of_available_stored_snapshots_for_watch">use the list returned here</a>
@apiExample {curl} Example usage:
curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history/1677092977 -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json"
@apiName Get single snapshot content
@apiGroup Watch History
@apiSuccess (200) {String} OK
@apiSuccess (404) {String} ERR Not found
"""
watch = self.datastore.data['watching'].get(uuid)
if not watch:
abort(404, message='No watch exists with the UUID of {}'.format(uuid))
@@ -179,7 +75,8 @@ class WatchSingleHistory(Resource):
if timestamp == 'latest':
timestamp = list(watch.history.keys())[-1]
content = watch.get_history_snapshot(timestamp)
with open(watch.history[timestamp], 'r') as f:
content = f.read()
response = make_response(content, 200)
response.mimetype = "text/plain"
@@ -193,83 +90,32 @@ class CreateWatch(Resource):
self.update_q = kwargs['update_q']
@auth.check_token
@expects_json(schema_create_watch)
def post(self):
"""
@api {post} /api/v1/watch Create a single watch
@apiDescription Requires atleast `url` set, can accept the same structure as <a href="#api-Watch-Watch">get single watch information</a> to create.
@apiExample {curl} Example usage:
curl http://localhost:4000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"url": "https://my-nice.com" , "tag": "nice list"}'
@apiName Create
@apiGroup Watch
@apiSuccess (200) {String} OK Was created
@apiSuccess (500) {String} ERR Some other error
"""
# curl http://localhost:4000/api/v1/watch -H "Content-Type: application/json" -d '{"url": "https://my-nice.com", "tag": "one, two" }'
json_data = request.get_json()
url = json_data['url'].strip()
tag = json_data['tag'].strip() if json_data.get('tag') else ''
if not validators.url(json_data['url'].strip()):
return "Invalid or unsupported URL", 400
if json_data.get('proxy'):
plist = self.datastore.proxy_list
if not json_data.get('proxy') in plist:
return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400
extras = {'title': json_data['title'].strip()} if json_data.get('title') else {}
extras = copy.deepcopy(json_data)
del extras['url']
new_uuid = self.datastore.add_watch(url=url, extras=extras)
if new_uuid:
self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
return {'uuid': new_uuid}, 201
else:
return "Invalid or unsupported URL", 400
new_uuid = self.datastore.add_watch(url=json_data['url'].strip(), tag=tag, extras=extras)
self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
return {'uuid': new_uuid}, 201
# Return concise list of available watches and some very basic info
# curl http://localhost:4000/api/v1/watch|python -mjson.tool
# ?recheck_all=1 to recheck all
@auth.check_token
def get(self):
"""
@api {get} /api/v1/watch List watches
@apiDescription Return concise list of available watches and some very basic info
@apiExample {curl} Example usage:
curl http://localhost:4000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45"
{
"6a4b7d5c-fee4-4616-9f43-4ac97046b595": {
"last_changed": 1677103794,
"last_checked": 1677103794,
"last_error": false,
"title": "",
"url": "http://www.quotationspage.com/random.php"
},
"e6f5fd5c-dbfe-468b-b8f3-f9d6ff5ad69b": {
"last_changed": 0,
"last_checked": 1676662819,
"last_error": false,
"title": "QuickLook",
"url": "https://github.com/QL-Win/QuickLook/tags"
}
}
@apiParam {String} [recheck_all] Optional Set to =1 to force recheck of all watches
@apiParam {String} [tag] Optional name of tag to limit results
@apiName ListWatches
@apiGroup Watch Management
@apiSuccess (200) {String} OK JSON dict
"""
list = {}
tag_limit = request.args.get('tag', None)
for k, watch in self.datastore.data['watching'].items():
if tag_limit:
if not tag_limit.lower() in watch.all_tags:
continue
list[k] = {'url': watch['url'],
'title': watch['title'],
'last_checked': watch['last_checked'],
'last_changed': watch.last_changed,
'last_error': watch['last_error']}
for k, v in self.datastore.data['watching'].items():
list[k] = {'url': v['url'],
'title': v['title'],
'last_checked': v['last_checked'],
'last_changed': v.last_changed,
'last_error': v['last_error']}
if request.args.get('recheck_all'):
for uuid in self.datastore.data['watching'].keys():
@@ -286,22 +132,6 @@ class SystemInfo(Resource):
@auth.check_token
def get(self):
"""
@api {get} /api/v1/systeminfo Return system info
@apiDescription Return some info about the current system state
@apiExample {curl} Example usage:
curl http://localhost:4000/api/v1/systeminfo -H"x-api-key:813031b16330fe25e3780cf0325daa45"
HTTP/1.0 200
{
'queue_size': 10 ,
'overdue_watches': ["watch-uuid-list"],
'uptime': 38344.55,
'watch_count': 800,
'version': "0.40.1"
}
@apiName Get Info
@apiGroup System Information
"""
import time
overdue_watches = []
@@ -320,11 +150,10 @@ class SystemInfo(Resource):
# Allow 5 minutes of grace time before we decide it's overdue
if time_since_check - (5 * 60) > t:
overdue_watches.append(uuid)
from changedetectionio import __version__ as main_version
return {
'queue_size': self.update_q.qsize(),
'overdue_watches': overdue_watches,
'uptime': round(time.time() - self.datastore.start_time, 2),
'watch_count': len(self.datastore.data.get('watching', {})),
'version': main_version
'watch_count': len(self.datastore.data.get('watching', {}))
}, 200
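The curl examples in the docstrings above translate directly to Python; a sketch using requests, where host, port and the x-api-key value are placeholders copied from those examples:
```python
# Sketch: drive the documented /api/v1 endpoints with the `requests` package
import requests

BASE = "http://localhost:4000/api/v1"
HEADERS = {"x-api-key": "813031b16330fe25e3780cf0325daa45"}  # placeholder key from the examples above

# List watches (concise info only, history is fetched separately)
print(requests.get(f"{BASE}/watch", headers=HEADERS).json())

# Create a watch; at least `url` is required
r = requests.post(f"{BASE}/watch", headers=HEADERS, json={"url": "https://example.com", "tag": "example"})
print(r.status_code, r.json())
```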

View File

@@ -3,8 +3,7 @@ import apprise
# Create our AppriseAsset and populate it with some of our new values:
# https://github.com/caronc/apprise/wiki/Development_API#the-apprise-asset-object
asset = apprise.AppriseAsset(
image_url_logo='https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png',
async_mode=False # Maybe related to https://github.com/dgtlmoon/changedetection.io/issues/1486
image_url_logo='https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png'
)
asset.app_id = "changedetection.io"
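For context, a minimal sketch of how an AppriseAsset like the one above is typically used (the notification URL is a placeholder, not from this changeset):

import apprise

apobj = apprise.Apprise(asset=asset)        # 'asset' is the AppriseAsset built above
apobj.add("json://localhost:8080/notify")   # any Apprise-supported notification URL
apobj.notify(title="Change detected", body="Example body text")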

View File

@@ -23,10 +23,11 @@
from distutils.util import strtobool
from flask import Blueprint, request, make_response
from flask_login import login_required
import os
import logging
from changedetectionio.store import ChangeDetectionStore
from changedetectionio import login_optionally_required
browsersteps_live_ui_o = {}
browsersteps_playwright_browser_interface = None
browsersteps_playwright_browser_interface_browser = None
@@ -64,7 +65,7 @@ def construct_blueprint(datastore: ChangeDetectionStore):
browser_steps_blueprint = Blueprint('browser_steps', __name__, template_folder="templates")
@login_optionally_required
@login_required
@browser_steps_blueprint.route("/browsersteps_update", methods=['GET', 'POST'])
def browsersteps_ui_update():
import base64
@@ -106,8 +107,8 @@ def construct_blueprint(datastore: ChangeDetectionStore):
if step_operation == 'Goto site':
step_operation = 'goto_url'
step_optional_value = datastore.data['watching'][uuid].get('url')
step_selector = None
step_optional_value = None
step_selector = datastore.data['watching'][uuid].get('url')
# @todo try.. except.. nice errors not popups..
try:

View File

@@ -25,14 +25,12 @@ browser_step_ui_config = {'Choose one': '0 0',
'Execute JS': '0 1',
# 'Extract text and use as filter': '1 0',
'Goto site': '0 0',
'Goto URL': '0 1',
'Press Enter': '0 0',
'Select by label': '1 1',
'Scroll down': '0 0',
'Uncheck checkbox': '1 0',
'Wait for seconds': '0 1',
'Wait for text': '0 1',
'Wait for text in element': '1 1',
# 'Press Page Down': '0 0',
# 'Press Page Up': '0 0',
# weird bug, come back to it later
@@ -55,7 +53,7 @@ class steppable_browser_interface():
print("> action calling", call_action_name)
# https://playwright.dev/python/docs/selectors#xpath-selectors
if selector and selector.startswith('/') and not selector.startswith('//'):
if selector.startswith('/') and not selector.startswith('//'):
selector = "xpath=" + selector
action_handler = getattr(self, "action_" + call_action_name)
@@ -74,10 +72,10 @@ class steppable_browser_interface():
self.page.wait_for_timeout(3 * 1000)
print("Call action done in", time.time() - now)
def action_goto_url(self, selector, value):
def action_goto_url(self, url, optional_value):
# self.page.set_viewport_size({"width": 1280, "height": 5000})
now = time.time()
response = self.page.goto(value, timeout=0, wait_until='commit')
response = self.page.goto(url, timeout=0, wait_until='commit')
# Wait_until = commit
# - `'commit'` - consider operation to be finished when network response is received and the document started loading.
@@ -134,17 +132,6 @@ class steppable_browser_interface():
def action_wait_for_seconds(self, selector, value):
self.page.wait_for_timeout(int(value) * 1000)
def action_wait_for_text(self, selector, value):
import json
v = json.dumps(value)
self.page.wait_for_function(f'document.querySelector("body").innerText.includes({v});', timeout=30000)
def action_wait_for_text_in_element(self, selector, value):
import json
s = json.dumps(selector)
v = json.dumps(value)
self.page.wait_for_function(f'document.querySelector({s}).innerText.includes({v});', timeout=30000)
# @todo - in the future make some popout interface to capture what needs to be set
# https://playwright.dev/python/docs/api/class-keyboard
def action_press_enter(self, selector, value):

View File

@@ -3,14 +3,11 @@
# Launch as an eventlet.wsgi server instance.
from distutils.util import strtobool
from json.decoder import JSONDecodeError
import eventlet
import eventlet.wsgi
import getopt
import os
import signal
import socket
import sys
from . import store, changedetection_app, content_fetcher
@@ -31,13 +28,11 @@ def sigterm_handler(_signo, _stack_frame):
def main():
global datastore
global app
datastore_path = None
do_cleanup = False
host = ''
ipv6_enabled = False
port = os.environ.get('PORT') or 5000
ssl_mode = False
host = ''
port = os.environ.get('PORT') or 5000
do_cleanup = False
datastore_path = None
# On Windows, create and use a default path.
if os.name == 'nt':
@@ -48,7 +43,7 @@ def main():
datastore_path = os.path.join(os.getcwd(), "../datastore")
try:
opts, args = getopt.getopt(sys.argv[1:], "6Ccsd:h:p:", "port")
opts, args = getopt.getopt(sys.argv[1:], "Ccsd:h:p:", "port")
except getopt.GetoptError:
print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path]')
sys.exit(2)
@@ -68,10 +63,6 @@ def main():
if opt == '-d':
datastore_path = arg
if opt == '-6':
print ("Enabling IPv6 listen support")
ipv6_enabled = True
# Cleanup (remove text files that aren't in the index)
if opt == '-c':
do_cleanup = True
@@ -92,14 +83,8 @@ def main():
"Or use the -C parameter to create the directory.".format(app_config['datastore_path']), file=sys.stderr)
sys.exit(2)
try:
datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=__version__)
except JSONDecodeError as e:
# Don't start if the JSON DB looks corrupt
print ("ERROR: JSON DB or Proxy List JSON at '{}' appears to be corrupt, aborting".format(app_config['datastore_path']))
print(str(e))
return
datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=__version__)
app = changedetection_app(app_config, datastore)
signal.signal(signal.SIGTERM, sigterm_handler)
@@ -139,15 +124,13 @@ def main():
from werkzeug.middleware.proxy_fix import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app, x_prefix=1, x_host=1)
s_type = socket.AF_INET6 if ipv6_enabled else socket.AF_INET
if ssl_mode:
# @todo finalise SSL config, but this should get you in the right direction if you need it.
eventlet.wsgi.server(eventlet.wrap_ssl(eventlet.listen((host, port), s_type),
eventlet.wsgi.server(eventlet.wrap_ssl(eventlet.listen((host, port)),
certfile='cert.pem',
keyfile='privkey.pem',
server_side=True), app)
else:
eventlet.wsgi.server(eventlet.listen((host, int(port)), s_type), app)
eventlet.wsgi.server(eventlet.listen((host, int(port))), app)

View File

@@ -1,4 +1,3 @@
import hashlib
from abc import abstractmethod
import chardet
import json
@@ -43,7 +42,7 @@ class BrowserStepsStepTimout(Exception):
class PageUnloadable(Exception):
def __init__(self, status_code, url, message, screenshot=False):
def __init__(self, status_code, url, screenshot=False, message=False):
# Set this so we can use it in other parts of the app
self.status_code = status_code
self.url = url
@@ -78,18 +77,18 @@ class ReplyWithContentButNoText(Exception):
return
class Fetcher():
error = None
status_code = None
content = None
headers = None
browser_steps = None
browser_steps_screenshot_path = None
content = None
error = None
fetcher_description = "No description"
headers = None
status_code = None
webdriver_js_execute_code = None
xpath_data = None
xpath_element_js = ""
instock_data = None
instock_data_js = ""
xpath_data = None
# Will be needed in the future by the VisualSelector, always get this where possible.
screenshot = False
@@ -103,7 +102,6 @@ class Fetcher():
from pkg_resources import resource_string
# The code that scrapes elements and makes a list of elements/size/position to click on in the VisualSelector
self.xpath_element_js = resource_string(__name__, "res/xpath_element_scraper.js").decode('utf-8')
self.instock_data_js = resource_string(__name__, "res/stock-not-in-stock.js").decode('utf-8')
@abstractmethod
@@ -118,8 +116,7 @@ class Fetcher():
request_body,
request_method,
ignore_status_codes=False,
current_include_filters=None,
is_binary=False):
current_include_filters=None):
# Should set self.error, self.status_code and self.content
pass
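The abstract run() contract above (populate self.content, self.status_code, self.headers and self.error) is what every fetcher implements; a hypothetical, stripped-down subclass might look like the following (class name and behaviour are illustrative only, following the newer signature that includes is_binary):

import requests

class minimal_requests_fetcher(Fetcher):
    fetcher_description = "Illustrative fetcher: plain GET, text only"

    def run(self, url, timeout, request_headers, request_body, request_method,
            ignore_status_codes=False, current_include_filters=None, is_binary=False):
        # Per the contract above, set status/headers/content for the rest of the app to use
        r = requests.get(url, headers=request_headers, timeout=timeout)
        self.status_code = r.status_code
        self.headers = r.headers
        self.content = r.text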
@@ -244,15 +241,10 @@ class base_html_playwright(Fetcher):
if proxy_override:
self.proxy = {'server': proxy_override}
if self.proxy:
# Playwright needs separate username and password values
from urllib.parse import urlparse
parsed = urlparse(self.proxy.get('server'))
if parsed.username:
self.proxy['username'] = parsed.username
self.proxy['password'] = parsed.password
def screenshot_step(self, step_n=''):
# There's a bug where we need to do it twice or it doesn't take the whole page, don't know why.
self.page.screenshot(type='jpeg', clip={'x': 1.0, 'y': 1.0, 'width': 1280, 'height': 1024})
screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=85)
if self.browser_steps_screenshot_path is not None:
@@ -275,8 +267,7 @@ class base_html_playwright(Fetcher):
request_body,
request_method,
ignore_status_codes=False,
current_include_filters=None,
is_binary=False):
current_include_filters=None):
from playwright.sync_api import sync_playwright
import playwright._impl._api_types
@@ -298,8 +289,8 @@ class base_html_playwright(Fetcher):
proxy=self.proxy,
# This is needed to enable JavaScript execution on GitHub and others
bypass_csp=True,
# Should be `allow` or `block` - sites like YouTube can transmit large amounts of data via Service Workers
service_workers=os.getenv('PLAYWRIGHT_SERVICE_WORKERS', 'allow'),
# Can't think why we need the service workers for our use case?
service_workers='block',
# Should never be needed
accept_downloads=False
)
@@ -308,34 +299,23 @@ class base_html_playwright(Fetcher):
if len(request_headers):
context.set_extra_http_headers(request_headers)
try:
self.page.set_default_navigation_timeout(90000)
self.page.set_default_timeout(90000)
# Listen for all console events and handle errors
self.page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))
# Goto page
try:
# Bug - never set viewport size BEFORE page.goto
# Waits for the next navigation. Using Python context manager
# prevents a race condition between clicking and waiting for a navigation.
response = self.page.goto(url, wait_until='commit')
# Wait_until = commit
# - `'commit'` - consider operation to be finished when network response is received and the document started loading.
# Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
# This seemed to solve nearly all 'TimeoutErrors'
response = self.page.goto(url, wait_until='commit')
except playwright._impl._api_types.Error as e:
# Retry once - https://github.com/browserless/chrome/issues/2485
# Sometimes errors related to invalid certs and other issues can be random
print ("Content Fetcher > retrying request got error - ", str(e))
time.sleep(1)
response = self.page.goto(url, wait_until='commit')
except Exception as e:
print ("Content Fetcher > Other exception when page.goto", str(e))
context.close()
browser.close()
raise PageUnloadable(url=url, status_code=None, message=str(e))
# Execute any browser steps
try:
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
self.page.wait_for_timeout(extra_wait * 1000)
@@ -348,32 +328,43 @@ class base_html_playwright(Fetcher):
# This can be ok, we will try to grab what we could retrieve
pass
except Exception as e:
print ("Content Fetcher > Other exception when executing custom JS code", str(e))
print ("other exception when page.goto")
print (str(e))
context.close()
browser.close()
raise PageUnloadable(url=url, status_code=None, message=str(e))
raise PageUnloadable(url=url, status_code=None)
if response is None:
context.close()
browser.close()
print ("Content Fetcher > Response object was none")
print ("response object was none")
raise EmptyReply(url=url, status_code=None)
# Bug 2(?) Set the viewport size AFTER loading the page
self.page.set_viewport_size({"width": 1280, "height": 1024})
# Run Browser Steps here
self.iterate_browser_steps()
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
time.sleep(extra_wait)
self.content = self.page.content()
self.status_code = response.status
if len(self.page.content().strip()) == 0:
context.close()
browser.close()
print ("Content Fetcher > Content was empty")
raise EmptyReply(url=url, status_code=response.status)
print ("Content was empty")
raise EmptyReply(url=url, status_code=None)
# Bug 2(?) Set the viewport size AFTER loading the page
self.page.set_viewport_size({"width": 1280, "height": 1024})
self.status_code = response.status
self.content = self.page.content()
self.headers = response.all_headers()
# So we can find an element on the page where its selector was entered manually (maybe not xPath etc)
@@ -383,7 +374,6 @@ class base_html_playwright(Fetcher):
self.page.evaluate("var include_filters=''")
self.xpath_data = self.page.evaluate("async () => {" + self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) + "}")
self.instock_data = self.page.evaluate("async () => {" + self.instock_data_js + "}")
# Bug 3 in Playwright screenshot handling
# Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it
@@ -393,6 +383,8 @@ class base_html_playwright(Fetcher):
# which will significantly increase the IO size between the server and client, it's recommended to use the lowest
# acceptable screenshot quality here
try:
# Quality set to 1 because it's not used, just used as a work-around for a bug, no need to change this.
self.page.screenshot(type='jpeg', clip={'x': 1.0, 'y': 1.0, 'width': 1280, 'height': 1024}, quality=1)
# The actual screenshot
self.screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)))
except Exception as e:
@@ -452,8 +444,7 @@ class base_html_webdriver(Fetcher):
request_body,
request_method,
ignore_status_codes=False,
current_include_filters=None,
is_binary=False):
current_include_filters=None):
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
@@ -511,7 +502,7 @@ class base_html_webdriver(Fetcher):
try:
self.driver.quit()
except Exception as e:
print("Content Fetcher > Exception in chrome shutdown/quit" + str(e))
print("Exception in chrome shutdown/quit" + str(e))
# "html_requests" is listed as the default fetcher in store.py!
@@ -528,8 +519,7 @@ class html_requests(Fetcher):
request_body,
request_method,
ignore_status_codes=False,
current_include_filters=None,
is_binary=False):
current_include_filters=None):
# Make requests use a more modern looking user-agent
if not 'User-Agent' in request_headers:
@@ -559,12 +549,10 @@ class html_requests(Fetcher):
# For example - some sites don't tell us it's utf-8, but return utf-8 content
# This seems to not occur when using webdriver/selenium, it seems to detect the text encoding more reliably.
# https://github.com/psf/requests/issues/1604 good info about requests encoding detection
if not is_binary:
# Don't run this for PDFs (and requests identified as binary), it takes a _long_ time
if not r.headers.get('content-type') or not 'charset=' in r.headers.get('content-type'):
encoding = chardet.detect(r.content)['encoding']
if encoding:
r.encoding = encoding
if not r.headers.get('content-type') or not 'charset=' in r.headers.get('content-type'):
encoding = chardet.detect(r.content)['encoding']
if encoding:
r.encoding = encoding
if not r.content or not len(r.content):
raise EmptyReply(url=url, status_code=r.status_code)
@@ -576,14 +564,8 @@ class html_requests(Fetcher):
raise Non200ErrorCodeReceived(url=url, status_code=r.status_code, page_html=r.text)
self.status_code = r.status_code
if is_binary:
# Binary files just return their checksum until we add something smarter
self.content = hashlib.md5(r.content).hexdigest()
else:
self.content = r.text
self.content = r.text
self.headers = r.headers
self.raw_content = r.content
# Decide which is the 'real' HTML webdriver, this is more a system wide config

View File

@@ -0,0 +1,14 @@
FROM python:3.8-slim
# https://stackoverflow.com/questions/58701233/docker-logs-erroneously-appears-empty-until-container-stops
ENV PYTHONUNBUFFERED=1
WORKDIR /app
RUN [ ! -d "/datastore" ] && mkdir /datastore
COPY sleep.py /
CMD [ "python", "/sleep.py" ]

View File

@@ -0,0 +1,7 @@
import time
print ("Sleep loop, you should run your script from the console")
while True:
# Wait for 2 seconds between iterations
time.sleep(2)

View File

@@ -10,7 +10,7 @@ def same_slicer(l, a, b):
return l[a:b]
# like .compare but a little different output
def customSequenceMatcher(before, after, include_equal=False, include_removed=True, include_added=True, include_replaced=True, include_change_type_prefix=True):
def customSequenceMatcher(before, after, include_equal=False):
cruncher = difflib.SequenceMatcher(isjunk=lambda x: x in " \\t", a=before, b=after)
# @todo Line-by-line mode instead of bunched, including `after` that is not in `before` (maybe unset?)
@@ -18,39 +18,34 @@ def customSequenceMatcher(before, after, include_equal=False, include_removed=Tr
if include_equal and tag == 'equal':
g = before[alo:ahi]
yield g
elif include_removed and tag == 'delete':
row_prefix = "(removed) " if include_change_type_prefix else ''
g = [ row_prefix + i for i in same_slicer(before, alo, ahi)]
elif tag == 'delete':
g = ["(removed) " + i for i in same_slicer(before, alo, ahi)]
yield g
elif include_replaced and tag == 'replace':
row_prefix = "(changed) " if include_change_type_prefix else ''
g = [row_prefix + i for i in same_slicer(before, alo, ahi)]
row_prefix = "(into) " if include_change_type_prefix else ''
g += [row_prefix + i for i in same_slicer(after, blo, bhi)]
elif tag == 'replace':
g = ["(changed) " + i for i in same_slicer(before, alo, ahi)]
g += ["(into ) " + i for i in same_slicer(after, blo, bhi)]
yield g
elif include_added and tag == 'insert':
row_prefix = "(added) " if include_change_type_prefix else ''
g = [row_prefix + i for i in same_slicer(after, blo, bhi)]
elif tag == 'insert':
g = ["(added ) " + i for i in same_slicer(after, blo, bhi)]
yield g
# only_differences - only return info about the differences, no context
# line_feed_sep could be "<br>" or "<li>" or "\n" etc
def render_diff(previous_version_file_contents, newest_version_file_contents, include_equal=False, include_removed=True, include_added=True, include_replaced=True, line_feed_sep="\n", include_change_type_prefix=True):
# line_feed_sep could be "<br/>" or "<li>" or "\n" etc
def render_diff(previous_file, newest_file, include_equal=False, line_feed_sep="\n"):
with open(newest_file, 'r') as f:
newest_version_file_contents = f.read()
newest_version_file_contents = [line.rstrip() for line in newest_version_file_contents.splitlines()]
newest_version_file_contents = [line.rstrip() for line in newest_version_file_contents.splitlines()]
if previous_version_file_contents:
if previous_file:
with open(previous_file, 'r') as f:
previous_version_file_contents = f.read()
previous_version_file_contents = [line.rstrip() for line in previous_version_file_contents.splitlines()]
else:
previous_version_file_contents = ""
rendered_diff = customSequenceMatcher(before=previous_version_file_contents,
after=newest_version_file_contents,
include_equal=include_equal,
include_removed=include_removed,
include_added=include_added,
include_replaced=include_replaced,
include_change_type_prefix=include_change_type_prefix)
rendered_diff = customSequenceMatcher(previous_version_file_contents,
newest_version_file_contents,
include_equal)
# Recursively join lists
f = lambda L: line_feed_sep.join([f(x) if type(x) is list else x for x in L])
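A small usage sketch of the content-based signature above (the input strings are made up; the exact prefixes depend on the include_* flags):

before = "price: 10\nin stock"
after = "price: 12\nin stock"
print(render_diff(previous_version_file_contents=before,
                  newest_version_file_contents=after,
                  include_equal=False,
                  line_feed_sep="\n"))
# Prints something like:
# (changed) price: 10
# (into) price: 12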

View File

@@ -1,7 +1,4 @@
# HTML to TEXT/JSON DIFFERENCE FETCHER
import hashlib
import json
import logging
import os
import re
@@ -10,26 +7,18 @@ import urllib3
from changedetectionio import content_fetcher, html_tools
from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
from copy import deepcopy
from . import difference_detection_processor
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
name = 'Webpage Text/HTML, JSON and PDF changes'
description = 'Detects all text changes where possible'
class FilterNotFoundInResponse(ValueError):
def __init__(self, msg):
ValueError.__init__(self, msg)
class PDFToHTMLToolNotFound(ValueError):
def __init__(self, msg):
ValueError.__init__(self, msg)
# Some common stuff here that can be moved to a base class
# (set_proxy_from_list)
class perform_site_check(difference_detection_processor):
class perform_site_check():
screenshot = None
xpath_data = None
@@ -59,7 +48,7 @@ class perform_site_check(difference_detection_processor):
watch = deepcopy(self.datastore.data['watching'].get(uuid))
if not watch:
raise Exception("Watch no longer exists.")
return
# Protect against file:// access
if re.search(r'^file', watch.get('url', ''), re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False):
@@ -97,10 +86,7 @@ class perform_site_check(difference_detection_processor):
is_source = True
# Pluggable content fetcher
prefer_backend = watch.get_fetch_backend
if not prefer_backend or prefer_backend == 'system':
prefer_backend = self.datastore.data['settings']['application']['fetch_backend']
prefer_backend = watch.get('fetch_backend')
if hasattr(content_fetcher, prefer_backend):
klass = getattr(content_fetcher, prefer_backend)
else:
@@ -130,18 +116,12 @@ class perform_site_check(difference_detection_processor):
if watch.get('webdriver_js_execute_code') is not None and watch.get('webdriver_js_execute_code').strip():
fetcher.webdriver_js_execute_code = watch.get('webdriver_js_execute_code')
# Requests for PDFs, images etc should be passed the is_binary flag
is_binary = watch.is_pdf
fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'), is_binary=is_binary)
fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'))
fetcher.quit()
self.screenshot = fetcher.screenshot
self.xpath_data = fetcher.xpath_data
# Track the content type
update_obj['content_type'] = fetcher.headers.get('Content-Type', '')
# Watches added automatically in the queue manager will skip if it's the same checksum as the previous run
# Saves a lot of CPU
update_obj['previous_md5_before_filters'] = hashlib.md5(fetcher.content.encode('utf-8')).hexdigest()
@@ -168,31 +148,6 @@ class perform_site_check(difference_detection_processor):
is_html = False
is_json = False
if watch.is_pdf or 'application/pdf' in fetcher.headers.get('Content-Type', '').lower():
from shutil import which
tool = os.getenv("PDF_TO_HTML_TOOL", "pdftohtml")
if not which(tool):
raise PDFToHTMLToolNotFound("Command-line `{}` tool was not found in system PATH, was it installed?".format(tool))
import subprocess
proc = subprocess.Popen(
[tool, '-stdout', '-', '-s', 'out.pdf', '-i'],
stdout=subprocess.PIPE,
stdin=subprocess.PIPE)
proc.stdin.write(fetcher.raw_content)
proc.stdin.close()
fetcher.content = proc.stdout.read().decode('utf-8')
proc.wait(timeout=60)
# Add a little metadata so we know if the file changes (like if an image changes, but the text is the same)
# @todo may cause problems with non-UTF8?
metadata = "<p>Added by changedetection.io: Document checksum - {} Filesize - {} bytes</p>".format(
hashlib.md5(fetcher.raw_content).hexdigest().upper(),
len(fetcher.content))
fetcher.content = fetcher.content.replace('</body>', metadata + '</body>')
include_filters_rule = deepcopy(watch.get('include_filters', []))
# include_filters_rule = watch['include_filters']
subtractive_selectors = watch.get(
@@ -212,14 +167,6 @@ class perform_site_check(difference_detection_processor):
include_filters_rule.append("json:$")
has_filter_rule = True
if is_json:
# Sort the JSON so we don't get false alerts when the content is just re-ordered
try:
fetcher.content = json.dumps(json.loads(fetcher.content), sort_keys=True)
except Exception as e:
# Might have just been a snippet, or otherwise bad JSON, continue
pass
if has_filter_rule:
json_filter_prefixes = ['json:', 'jq:']
for filter in include_filters_rule:
@@ -227,8 +174,6 @@ class perform_site_check(difference_detection_processor):
stripped_text_from_html += html_tools.extract_json_as_string(content=fetcher.content, json_filter=filter)
is_html = False
if is_html or is_source:
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
@@ -279,34 +224,6 @@ class perform_site_check(difference_detection_processor):
# Re #340 - return the content before the 'ignore text' was applied
text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')
# @todo whitespace coming from missing rtrim()?
# stripped_text_from_html could be based on their preferences, replace the processed text with only that which they want to know about.
# Rewrites the processed text based on only the diff result they want to see
if watch.has_special_diff_filter_options_set() and len(watch.history.keys()):
# Now the content comes from the diff-parser and not the returned HTTP traffic, so could be some differences
from .. import diff
# needs to not include (added) etc or it may get used twice
# Replace the processed text with the preferred result
rendered_diff = diff.render_diff(previous_version_file_contents=watch.get_last_fetched_before_filters(),
newest_version_file_contents=stripped_text_from_html,
include_equal=False, # not the same lines
include_added=watch.get('filter_text_added', True),
include_removed=watch.get('filter_text_removed', True),
include_replaced=watch.get('filter_text_replaced', True),
line_feed_sep="\n",
include_change_type_prefix=False)
watch.save_last_fetched_before_filters(text_content_before_ignored_filter)
if not rendered_diff and stripped_text_from_html:
# We had some content, but no differences were found
# Store our new file as the MD5 so it will trigger in the future
c = hashlib.md5(text_content_before_ignored_filter.translate(None, b'\r\n\t ')).hexdigest()
return False, {'previous_md5': c}, stripped_text_from_html.encode('utf-8')
else:
stripped_text_from_html = rendered_diff
# Treat pages with no renderable text content as a change? No by default
empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)
if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0:
@@ -365,7 +282,6 @@ class perform_site_check(difference_detection_processor):
blocked = True
# Filter and trigger work the same, so reuse it
# It should return the line numbers that match
# Unblock flow if the trigger was found (some text remained after stripping what didn't match)
result = html_tools.strip_ignore_text(content=str(stripped_text_from_html),
wordlist=trigger_text,
mode="line numbers")

View File

@@ -138,7 +138,7 @@ class ValidateContentFetcherIsReady(object):
from changedetectionio import content_fetcher
# Better would be a radiohandler that keeps a reference to each class
if field.data is not None and field.data != 'system':
if field.data is not None:
klass = getattr(content_fetcher, field.data)
some_object = klass()
try:
@@ -147,12 +147,12 @@ class ValidateContentFetcherIsReady(object):
except urllib3.exceptions.MaxRetryError as e:
driver_url = some_object.command_executor
message = field.gettext('Content fetcher \'%s\' did not respond.' % (field.data))
message += '<br>' + field.gettext(
message += '<br/>' + field.gettext(
'Be sure that the selenium/webdriver runner is running and accessible via network from this container/host.')
message += '<br>' + field.gettext('Did you follow the instructions in the wiki?')
message += '<br><br>' + field.gettext('WebDriver Host: %s' % (driver_url))
message += '<br><a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">Go here for more information</a>'
message += '<br>'+field.gettext('Content fetcher did not respond properly, unable to use it.\n %s' % (str(e)))
message += '<br/>' + field.gettext('Did you follow the instructions in the wiki?')
message += '<br/><br/>' + field.gettext('WebDriver Host: %s' % (driver_url))
message += '<br/><a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">Go here for more information</a>'
message += '<br/>'+field.gettext('Content fetcher did not respond properly, unable to use it.\n %s' % (str(e)))
raise ValidationError(message)
@@ -232,17 +232,12 @@ class validateURL(object):
def __call__(self, form, field):
import validators
try:
validators.url(field.data.strip())
except validators.ValidationFailure:
message = field.gettext('\'%s\' is not a valid URL.' % (field.data.strip()))
raise ValidationError(message)
from .model.Watch import is_safe_url
if not is_safe_url(field.data):
raise ValidationError('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX')
class ValidateListRegex(object):
"""
@@ -344,12 +339,9 @@ class ValidateCSSJSONXPATHInput(object):
raise ValidationError("A system-error occurred when validating your jq expression")
class quickWatchForm(Form):
from . import processors
url = fields.URLField('URL', validators=[validateURL()])
tag = StringField('Group tag', [validators.Optional()])
watch_submit_button = SubmitField('Watch', render_kw={"class": "pure-button pure-button-primary"})
processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff")
edit_and_watch_submit_button = SubmitField('Edit > Watch', render_kw={"class": "pure-button pure-button-primary"})
@@ -363,10 +355,6 @@ class commonSettingsForm(Form):
extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False)
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1,
message="Should contain one or more seconds")])
class importForm(Form):
from . import processors
processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff")
urls = TextAreaField('URLs')
class SingleBrowserStep(Form):
@@ -399,19 +387,11 @@ class watchForm(commonSettingsForm):
body = TextAreaField('Request body', [validators.Optional()])
method = SelectField('Request method', choices=valid_method, default=default_method)
ignore_status_codes = BooleanField('Ignore status codes (process non-2xx status codes as normal)', default=False)
check_unique_lines = BooleanField('Only trigger when unique lines appear', default=False)
filter_text_added = BooleanField('Added lines', default=True)
filter_text_replaced = BooleanField('Replaced/changed lines', default=True)
filter_text_removed = BooleanField('Removed lines', default=True)
# @todo this class could be moved to its own text_json_diff_watchForm and this goes to restock_diff_Watchform perhaps
in_stock_only = BooleanField('Only trigger when product goes BACK to in-stock', default=True)
check_unique_lines = BooleanField('Only trigger when new lines appear', default=False)
trigger_text = StringListField('Trigger/wait for text', [validators.Optional(), ValidateListRegex()])
if os.getenv("PLAYWRIGHT_DRIVER_URL"):
browser_steps = FieldList(FormField(SingleBrowserStep), min_entries=10)
text_should_not_be_present = StringListField('Block change-detection while text matches', [validators.Optional(), ValidateListRegex()])
text_should_not_be_present = StringListField('Block change-detection if text matches', [validators.Optional(), ValidateListRegex()])
webdriver_js_execute_code = TextAreaField('Execute JavaScript before change detection', render_kw={"rows": "5"}, validators=[validators.Optional()])
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
@@ -446,13 +426,6 @@ class watchForm(commonSettingsForm):
return result
class SingleExtraProxy(Form):
# maybe better to set some <script>var..
proxy_name = StringField('Name', [validators.Optional()], render_kw={"placeholder": "Name"})
proxy_url = StringField('Proxy URL', [validators.Optional()], render_kw={"placeholder": "http://user:pass@...:3128", "size":50})
# @todo do the validation here instead
# datastore.data['settings']['requests']..
class globalSettingsRequestForm(Form):
time_between_check = FormField(TimeBetweenCheckForm)
@@ -460,30 +433,21 @@ class globalSettingsRequestForm(Form):
jitter_seconds = IntegerField('Random jitter seconds ± check',
render_kw={"style": "width: 5em;"},
validators=[validators.NumberRange(min=0, message="Should contain zero or more seconds")])
extra_proxies = FieldList(FormField(SingleExtraProxy), min_entries=5)
def validate_extra_proxies(self, extra_validators=None):
for e in self.data['extra_proxies']:
if e.get('proxy_name') or e.get('proxy_url'):
if not e.get('proxy_name','').strip() or not e.get('proxy_url','').strip():
self.extra_proxies.errors.append('Both a name and a Proxy URL are required.')
return False
# datastore.data['settings']['application']..
class globalSettingsApplicationForm(commonSettingsForm):
api_access_token_enabled = BooleanField('API access token security check enabled', default=True, validators=[validators.Optional()])
base_url = StringField('Base URL', validators=[validators.Optional()])
empty_pages_are_a_change = BooleanField('Treat empty pages as a change?', default=False)
fetch_backend = RadioField('Fetch Method', default="html_requests", choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
global_ignore_text = StringListField('Ignore Text', [ValidateListRegex()])
global_subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)])
global_ignore_text = StringListField('Ignore Text', [ValidateListRegex()])
ignore_whitespace = BooleanField('Ignore whitespace')
password = SaltyPasswordField()
removepassword_button = SubmitField('Remove password', render_kw={"class": "pure-button pure-button-primary"})
empty_pages_are_a_change = BooleanField('Treat empty pages as a change?', default=False)
render_anchor_tag_content = BooleanField('Render anchor tag content', default=False)
shared_diff_access = BooleanField('Allow access to view diff page when password is enabled', default=False, validators=[validators.Optional()])
fetch_backend = RadioField('Fetch Method', default="html_requests", choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
api_access_token_enabled = BooleanField('API access token security check enabled', default=True, validators=[validators.Optional()])
password = SaltyPasswordField()
filter_failure_notification_threshold_attempts = IntegerField('Number of times the filter can be missing before sending a notification',
render_kw={"style": "width: 5em;"},
validators=[validators.NumberRange(min=0,

View File

@@ -8,7 +8,7 @@ import json
import re
# HTML added to be sure each result matching a filter (.example) gets converted to a new line by Inscriptis
TEXT_FILTER_LIST_LINE_SUFFIX = "<br>"
TEXT_FILTER_LIST_LINE_SUFFIX = "<br/>"
# 'price' , 'lowPrice', 'highPrice' are usually under here
# all of those may or may not appear on different websites
@@ -287,18 +287,3 @@ def workarounds_for_obfuscations(content):
content = re.sub('<!--\s+-->', '', content)
return content
def get_triggered_text(content, trigger_text):
triggered_text = []
result = strip_ignore_text(content=content,
wordlist=trigger_text,
mode="line numbers")
i = 1
for p in content.splitlines():
if i in result:
triggered_text.append(p)
i += 1
return triggered_text

View File

@@ -29,7 +29,6 @@ class import_url_list(Importer):
data,
flash,
datastore,
processor=None
):
urls = data.split("\n")
@@ -53,11 +52,7 @@ class import_url_list(Importer):
# Flask wtform validators won't work with basic auth, use validators package
# Up to 5000 per batch so we don't flood the server
if len(url) and validators.url(url.replace('source:', '')) and good < 5000:
extras = None
if processor:
extras = {'processor': processor}
new_uuid = datastore.add_watch(url=url.strip(), tag=tags, write_to_disk_now=False, extras=extras)
new_uuid = datastore.add_watch(url=url.strip(), tag=tags, write_to_disk_now=False)
if new_uuid:
# Straight into the queue.
self.new_uuids.append(new_uuid)

View File

@@ -15,12 +15,11 @@ class model(dict):
'headers': {
},
'requests': {
'extra_proxies': [], # Configurable extra proxies via the UI
'jitter_seconds': 0,
'proxy': None, # Preferred proxy connection
'time_between_check': {'weeks': None, 'days': None, 'hours': 3, 'minutes': None, 'seconds': None},
'timeout': int(getenv("DEFAULT_SETTINGS_REQUESTS_TIMEOUT", "45")), # Default 45 seconds
'time_between_check': {'weeks': None, 'days': None, 'hours': 3, 'minutes': None, 'seconds': None},
'jitter_seconds': 0,
'workers': int(getenv("DEFAULT_SETTINGS_REQUESTS_WORKERS", "10")), # Number of threads, lower is better for slow connections
'proxy': None # Preferred proxy connection
},
'application': {
'api_access_token_enabled': True,
@@ -28,6 +27,7 @@ class model(dict):
'base_url' : None,
'extract_title_as_title': False,
'empty_pages_are_a_change': False,
'css_dark_mode': False,
'fetch_backend': getenv("DEFAULT_FETCH_BACKEND", "html_requests"),
'filter_failure_notification_threshold_attempts': _FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT,
'global_ignore_text': [], # List of text to ignore when calculating the comparison checksum
@@ -40,7 +40,6 @@ class model(dict):
'notification_body': default_notification_body,
'notification_format': default_notification_format,
'schema_version' : 0,
'shared_diff_access': False,
'webdriver_delay': None # Extra delay in seconds before extracting text
}
}

View File

@@ -1,14 +1,9 @@
from distutils.util import strtobool
import logging
import os
import re
import time
import uuid
# Allowable protocols, protects against javascript: etc
# file:// is further checked by ALLOW_FILE_URI
SAFE_PROTOCOL_REGEX='^(http|https|ftp|file):'
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
@@ -16,79 +11,60 @@ from changedetectionio.notification import (
default_notification_format_for_watch
)
base_config = {
'body': None,
'check_unique_lines': False, # On change-detected, compare against all history if it's something new
'check_count': 0,
'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
'extract_text': [], # Extract text by regex after filters
'extract_title_as_title': False,
'fetch_backend': 'system', # plaintext, playwright etc
'processor': 'text_json_diff', # could be restock_diff or others from .processors
'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
'filter_text_added': True,
'filter_text_replaced': True,
'filter_text_removed': True,
'has_ldjson_price_data': None,
'track_ldjson_price_data': None,
'headers': {}, # Extra headers to send
'ignore_text': [], # List of text to ignore when calculating the comparison checksum
'in_stock_only' : True, # Only trigger change on going to instock from out-of-stock
'include_filters': [],
'last_checked': 0,
'last_error': False,
'last_viewed': 0, # history key value of the last viewed via the [diff] link
'method': 'GET',
# Custom notification content
'notification_body': None,
'notification_format': default_notification_format_for_watch,
'notification_muted': False,
'notification_title': None,
'notification_screenshot': False, # Include the latest screenshot if available and supported by the apprise URL
'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
'paused': False,
'previous_md5': False,
'previous_md5_before_filters': False, # Used for skipping changedetection entirely
'proxy': None, # Preferred proxy connection
'subtractive_selectors': [],
'tag': None,
'text_should_not_be_present': [], # Text that should not be present
# Re #110, so then if this is set to None, we know to use the default value instead
# Requires setting to None on submit if it's the same as the default
# Should be all None by default, so we use the system default in this case.
'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
'title': None,
'trigger_text': [], # List of text or regex to wait for until a change is detected
'url': '',
'uuid': str(uuid.uuid4()),
'webdriver_delay': None,
'webdriver_js_execute_code': None, # Run before change-detection
}
def is_safe_url(test_url):
# See https://github.com/dgtlmoon/changedetection.io/issues/1358
# Remove 'source:' prefix so we don't get 'source:javascript:' etc
# 'source:' is a valid way to tell us to return the source
r = re.compile(re.escape('source:'), re.IGNORECASE)
test_url = r.sub('', test_url)
pattern = re.compile(os.getenv('SAFE_PROTOCOL_REGEX', SAFE_PROTOCOL_REGEX), re.IGNORECASE)
if not pattern.match(test_url.strip()):
return False
return True
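A quick illustration of the check above, assuming the default SAFE_PROTOCOL_REGEX and no environment override:

print(is_safe_url("https://example.com/page"))          # True - protocol is in the allow-list
print(is_safe_url("source:https://example.com/page"))   # True - the 'source:' prefix is stripped first
print(is_safe_url("javascript:alert(1)"))               # False - javascript: is not an allowed protocol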
class model(dict):
__newest_history_key = None
__history_n = 0
__base_config = {
# 'history': {}, # Dict of timestamp and output stripped filename (removed)
# 'newest_history_key': 0, (removed, taken from history.txt index)
'body': None,
'check_unique_lines': False, # On change-detected, compare against all history if it's something new
'check_count': 0,
'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
'extract_text': [], # Extract text by regex after filters
'extract_title_as_title': False,
'fetch_backend': None,
'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
'has_ldjson_price_data': None,
'track_ldjson_price_data': None,
'headers': {}, # Extra headers to send
'ignore_text': [], # List of text to ignore when calculating the comparison checksum
'include_filters': [],
'last_checked': 0,
'last_error': False,
'last_viewed': 0, # history key value of the last viewed via the [diff] link
'method': 'GET',
# Custom notification content
'notification_body': None,
'notification_format': default_notification_format_for_watch,
'notification_muted': False,
'notification_title': None,
'notification_screenshot': False, # Include the latest screenshot if available and supported by the apprise URL
'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
'paused': False,
'previous_md5': False,
'previous_md5_before_filters': False, # Used for skipping changedetection entirely
'proxy': None, # Preferred proxy connection
'subtractive_selectors': [],
'tag': None,
'text_should_not_be_present': [], # Text that should not be present
# Re #110, so then if this is set to None, we know to use the default value instead
# Requires setting to None on submit if it's the same as the default
# Should be all None by default, so we use the system default in this case.
'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
'title': None,
'trigger_text': [], # List of text or regex to wait for until a change is detected
'url': None,
'uuid': str(uuid.uuid4()),
'webdriver_delay': None,
'webdriver_js_execute_code': None, # Run before change-detection
}
jitter_seconds = 0
def __init__(self, *arg, **kw):
self.update(base_config)
self.update(self.__base_config)
self.__datastore_path = kw['datastore_path']
self['uuid'] = str(uuid.uuid4())
@@ -119,11 +95,7 @@ class model(dict):
@property
def link(self):
url = self.get('url', '')
if not is_safe_url(url):
return 'DISABLED'
ready_url = url
if '{%' in url or '{{' in url:
from jinja2 import Environment
@@ -142,26 +114,6 @@ class model(dict):
return ready_url
@property
def get_fetch_backend(self):
"""
Like just using the `fetch_backend` key but there could be some logic
:return:
"""
# Maybe also if is_image etc?
# This is because chrome/playwright won't render the PDF in the browser, so we just fetch it and use pdftohtml to see the text.
if self.is_pdf:
return 'html_requests'
return self.get('fetch_backend')
@property
def is_pdf(self):
# content_type field is set in the future
# https://github.com/dgtlmoon/changedetection.io/issues/1392
# Not sure the best logic here
return self.get('url', '').lower().endswith('.pdf') or 'pdf' in self.get('content_type', '').lower()
@property
def label(self):
# Used for sorting
@@ -244,32 +196,9 @@ class model(dict):
bump = self.history
return self.__newest_history_key
def get_history_snapshot(self, timestamp):
import brotli
filepath = self.history[timestamp]
# See if a brotli version exists and switch to that
if not filepath.endswith('.br') and os.path.isfile(f"{filepath}.br"):
filepath = f"{filepath}.br"
# OR in the backup case that the .br does not exist, but the plain one does
if filepath.endswith('.br') and not os.path.isfile(filepath):
if os.path.isfile(filepath.replace('.br', '')):
filepath = filepath.replace('.br', '')
if filepath.endswith('.br'):
# Brotli doesn't have a file header to detect it, so we rely on the filename
# https://www.rfc-editor.org/rfc/rfc7932
with open(filepath, 'rb') as f:
return(brotli.decompress(f.read()).decode('utf-8'))
with open(filepath, 'r', encoding='utf-8', errors='ignore') as f:
return f.read()
# Save some text file to the appropriate path and bump the history
# result_obj from fetch_site_status.run()
def save_history_text(self, contents, timestamp, snapshot_id):
import brotli
def save_history_text(self, contents, timestamp):
self.ensure_data_dir_exists()
@@ -278,21 +207,13 @@ class model(dict):
if self.__newest_history_key and int(timestamp) == int(self.__newest_history_key):
time.sleep(timestamp - self.__newest_history_key)
threshold = int(os.getenv('SNAPSHOT_BROTLI_COMPRESSION_THRESHOLD', 1024))
skip_brotli = strtobool(os.getenv('DISABLE_BROTLI_TEXT_SNAPSHOT', 'False'))
snapshot_fname = "{}.txt".format(str(uuid.uuid4()))
if not skip_brotli and len(contents) > threshold:
snapshot_fname = f"{snapshot_id}.txt.br"
dest = os.path.join(self.watch_data_dir, snapshot_fname)
if not os.path.exists(dest):
with open(dest, 'wb') as f:
f.write(brotli.compress(contents, mode=brotli.MODE_TEXT))
else:
snapshot_fname = f"{snapshot_id}.txt"
dest = os.path.join(self.watch_data_dir, snapshot_fname)
if not os.path.exists(dest):
with open(dest, 'wb') as f:
f.write(contents)
# in /diff/ and /preview/ we are going to assume for now that it's UTF-8 when reading
# most sites are utf-8 and some are even broken utf-8
with open(os.path.join(self.watch_data_dir, snapshot_fname), 'wb') as f:
f.write(contents)
f.close()
# Append to index
# @todo check last char was \n
@@ -329,8 +250,7 @@ class model(dict):
# Compare each line (set) against each history text file (set), looking for something new..
existing_history = set({})
for k, v in self.history.items():
content = self.get_history_snapshot(k)
alist = set([line.strip().lower() for line in content.splitlines()])
alist = set([line.decode('utf-8').strip().lower() for line in open(v, 'rb')])
existing_history = existing_history.union(alist)
# Check that everything in local_lines(new stuff) already exists in existing_history - it should
@@ -345,6 +265,17 @@ class model(dict):
# False is not an option for AppRise, must be type None
return None
def get_screenshot_as_jpeg(self):
# Created by save_screenshot()
fname = os.path.join(self.watch_data_dir, "last-screenshot.jpg")
if os.path.isfile(fname):
return fname
# False is not an option for AppRise, must be type None
return None
def __get_file_ctime(self, filename):
fname = os.path.join(self.watch_data_dir, filename)
if os.path.isfile(fname):
@@ -391,25 +322,6 @@ class model(dict):
return fname
return False
def pause(self):
self['paused'] = True
def unpause(self):
self['paused'] = False
def toggle_pause(self):
self['paused'] ^= True
def mute(self):
self['notification_muted'] = True
def unmute(self):
self['notification_muted'] = False
def toggle_mute(self):
self['notification_muted'] ^= True
def extract_regex_from_all_history(self, regex):
import csv
import re
@@ -421,8 +333,8 @@ class model(dict):
# self.history will be keyed with the full path
for k, fname in self.history.items():
if os.path.isfile(fname):
if True:
contents = self.get_history_snapshot(k)
with open(fname, "r") as f:
contents = f.read()
res = re.findall(regex, contents, re.MULTILINE)
if res:
if not csv_writer:
@@ -453,43 +365,3 @@ class model(dict):
f.close()
return csv_output_filename
@property
# Return list of tags, stripped and lowercase, used for searching
def all_tags(self):
return [s.strip().lower() for s in self.get('tag','').split(',')]
def has_special_diff_filter_options_set(self):
# All False - nothing would be done, so act like it's not processable
if not self.get('filter_text_added', True) and not self.get('filter_text_replaced', True) and not self.get('filter_text_removed', True):
return False
# Or one is set
if not self.get('filter_text_added', True) or not self.get('filter_text_replaced', True) or not self.get('filter_text_removed', True):
return True
# None is set
return False
def get_last_fetched_before_filters(self):
import brotli
filepath = os.path.join(self.watch_data_dir, 'last-fetched.br')
if not os.path.isfile(filepath):
# If a previous attempt doesn't yet exist, just snarf the previous snapshot instead
dates = list(self.history.keys())
if len(dates):
return self.get_history_snapshot(dates[-1])
else:
return ''
with open(filepath, 'rb') as f:
return(brotli.decompress(f.read()).decode('utf-8'))
def save_last_fetched_before_filters(self, contents):
import brotli
filepath = os.path.join(self.watch_data_dir, 'last-fetched.br')
with open(filepath, 'wb') as f:
f.write(brotli.compress(contents, mode=brotli.MODE_TEXT))

View File

@@ -5,18 +5,15 @@ import json
valid_tokens = {
'base_url': '',
'current_snapshot': '',
'diff': '',
'diff_added': '',
'diff_full': '',
'diff_removed': '',
'diff_url': '',
'preview_url': '',
'triggered_text': '',
'watch_tag': '',
'watch_title': '',
'watch_url': '',
'watch_uuid': '',
'watch_title': '',
'watch_tag': '',
'diff': '',
'diff_full': '',
'diff_url': '',
'preview_url': '',
'current_snapshot': ''
}
default_notification_format_for_watch = 'System default'
@@ -123,10 +120,10 @@ def process_notification(n_object, datastore):
url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png'
if url.startswith('tgram://'):
# Telegram only supports a limited subset of HTML, remove the '<br>' we place in.
# Telegram only supports a limited subset of HTML, remove the '<br/>' we place in.
# re https://github.com/dgtlmoon/changedetection.io/issues/555
# @todo re-use an existing library we have already imported to strip all non-allowed tags
n_body = n_body.replace('<br>', '\n')
n_body = n_body.replace('<br/>', '\n')
n_body = n_body.replace('</br>', '\n')
# real limit is 4096, but minus some for extra metadata
payload_max_size = 3600
@@ -212,18 +209,15 @@ def create_notification_parameters(n_object, datastore):
tokens.update(
{
'base_url': base_url if base_url is not None else '',
'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else '',
'diff': n_object.get('diff', ''), # Null default in the case we use a test
'diff_added': n_object.get('diff_added', ''), # Null default in the case we use a test
'diff_full': n_object.get('diff_full', ''), # Null default in the case we use a test
'diff_removed': n_object.get('diff_removed', ''), # Null default in the case we use a test
'diff_url': diff_url,
'preview_url': preview_url,
'triggered_text': n_object.get('triggered_text', ''),
'watch_tag': watch_tag if watch_tag is not None else '',
'watch_title': watch_title if watch_title is not None else '',
'watch_url': watch_url,
'watch_uuid': uuid,
'watch_title': watch_title if watch_title is not None else '',
'watch_tag': watch_tag if watch_tag is not None else '',
'diff_url': diff_url,
'diff': n_object.get('diff', ''), # Null default in the case we use a test
'diff_full': n_object.get('diff_full', ''), # Null default in the case we use a test
'preview_url': preview_url,
'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else ''
})
return tokens

View File

@@ -1,11 +0,0 @@
# Change detection post-processors
The concept here is to be able to switch between different domain-specific problems to solve.
- `text_json_diff` The traditional text and JSON comparison handler
- `restock_diff` Only cares about detecting if a product looks like it has some text that suggests that it's out of stock, otherwise assumes that it's in stock.
Some suggestions for the future
- `graphical`
- `restock_and_price` - extract price AND stock text
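As a rough, hypothetical sketch of the plug-in idea described above (it assumes the difference_detection_processor base class that appears later in this changeset; fetch_content() is a made-up placeholder):

import hashlib

class perform_site_check(difference_detection_processor):
    def run(self, uuid, skip_when_checksum_same=True):
        update_obj = {'last_notification_error': False, 'last_error': False}
        content = fetch_content(uuid)  # placeholder - a real processor would fetch and filter here
        update_obj['previous_md5'] = hashlib.md5(content.encode('utf-8')).hexdigest()
        changed_detected = False       # a real processor compares against the stored previous_md5
        return changed_detected, update_obj, content.encode('utf-8')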

View File

@@ -1,24 +0,0 @@
from abc import abstractmethod
import hashlib
class difference_detection_processor():
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@abstractmethod
def run(self, uuid, skip_when_checksum_same=True):
update_obj = {'last_notification_error': False, 'last_error': False}
some_data = 'xxxxx'
update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest()
changed_detected = False
return changed_detected, update_obj, ''.encode('utf-8')
def available_processors():
from . import restock_diff, text_json_diff
x=[('text_json_diff', text_json_diff.name), ('restock_diff', restock_diff.name)]
# @todo Make this smarter with introspection of sorts.
return x

View File

@@ -1,125 +0,0 @@
import hashlib
import os
import re
import urllib3
from . import difference_detection_processor
from changedetectionio import content_fetcher
from copy import deepcopy
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
name = 'Re-stock detection for single product pages'
description = 'Detects if the product goes back to in-stock'
class perform_site_check(difference_detection_processor):
screenshot = None
xpath_data = None
def __init__(self, *args, datastore, **kwargs):
super().__init__(*args, **kwargs)
self.datastore = datastore
def run(self, uuid, skip_when_checksum_same=True):
# DeepCopy so we can be sure we don't accidentally change anything by reference
watch = deepcopy(self.datastore.data['watching'].get(uuid))
if not watch:
raise Exception("Watch no longer exists.")
# Protect against file:// access
if re.search(r'^file', watch.get('url', ''), re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False):
raise Exception(
"file:// type access is denied for security reasons."
)
# Unset any existing notification error
update_obj = {'last_notification_error': False, 'last_error': False}
extra_headers = watch.get('headers', [])
# Tweak the base config with the per-watch ones
request_headers = deepcopy(self.datastore.data['settings']['headers'])
request_headers.update(extra_headers)
# https://github.com/psf/requests/issues/4525
# Requests doesn't yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
# do this by accident.
if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')
timeout = self.datastore.data['settings']['requests'].get('timeout')
url = watch.link
request_body = self.datastore.data['watching'][uuid].get('body')
request_method = self.datastore.data['watching'][uuid].get('method')
ignore_status_codes = self.datastore.data['watching'][uuid].get('ignore_status_codes', False)
# Pluggable content fetcher
prefer_backend = watch.get_fetch_backend
if not prefer_backend or prefer_backend == 'system':
prefer_backend = self.datastore.data['settings']['application']['fetch_backend']
if hasattr(content_fetcher, prefer_backend):
klass = getattr(content_fetcher, prefer_backend)
else:
# If the klass doesn't exist, just use a default
klass = getattr(content_fetcher, "html_requests")
proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
proxy_url = None
if proxy_id:
proxy_url = self.datastore.proxy_list.get(proxy_id).get('url')
print("UUID {} Using proxy {}".format(uuid, proxy_url))
fetcher = klass(proxy_override=proxy_url)
# Configurable per-watch or global extra delay before extracting text (for webDriver types)
system_webdriver_delay = self.datastore.data['settings']['application'].get('webdriver_delay', None)
if watch['webdriver_delay'] is not None:
fetcher.render_extract_delay = watch.get('webdriver_delay')
elif system_webdriver_delay is not None:
fetcher.render_extract_delay = system_webdriver_delay
# Could be removed if requests/plaintext could also return some info?
if prefer_backend != 'html_webdriver':
raise Exception("Re-stock detection requires Chrome or compatible webdriver/playwright fetcher to work")
if watch.get('webdriver_js_execute_code') is not None and watch.get('webdriver_js_execute_code').strip():
fetcher.webdriver_js_execute_code = watch.get('webdriver_js_execute_code')
fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'))
fetcher.quit()
self.screenshot = fetcher.screenshot
self.xpath_data = fetcher.xpath_data
# Track the content type
update_obj['content_type'] = fetcher.headers.get('Content-Type', '')
update_obj["last_check_status"] = fetcher.get_last_status_code()
# Main detection method
fetched_md5 = None
if fetcher.instock_data:
fetched_md5 = hashlib.md5(fetcher.instock_data.encode('utf-8')).hexdigest()
# 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold.
update_obj["in_stock"] = True if fetcher.instock_data == 'Possibly in stock' else False
# The main thing that all this at the moment comes down to :)
changed_detected = False
if watch.get('previous_md5') and watch.get('previous_md5') != fetched_md5:
# Yes if we only care about it going to instock, AND we are in stock
if watch.get('in_stock_only') and update_obj["in_stock"]:
changed_detected = True
if not watch.get('in_stock_only'):
# All cases
changed_detected = True
# Always record the new checksum
update_obj["previous_md5"] = fetched_md5
return changed_detected, update_obj, fetcher.instock_data.encode('utf-8')
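The detection above boils down to a small decision: hash the text returned by the in-stock script, compare it against the previous checksum, and optionally only alert on the out-of-stock to in-stock transition. A minimal sketch, assuming the same field names as the processor (everything else is illustrative):
import hashlib

def restock_changed(previous_md5, instock_data, in_stock_only):
    # Hash whatever text stock-not-in-stock.js returned, exactly as run() does
    fetched_md5 = hashlib.md5(instock_data.encode('utf-8')).hexdigest()
    in_stock = (instock_data == 'Possibly in stock')
    changed = False
    if previous_md5 and previous_md5 != fetched_md5:
        # When 'in_stock_only' is set, only a change *into* stock counts
        changed = in_stock if in_stock_only else True
    return changed, fetched_md5, in_stock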

View File

@@ -1,57 +0,0 @@
function isItemInStock() {
// @todo Pass these in so the same list can be used in non-JS fetchers
const outOfStockTexts = [
'0 in stock',
'agotado',
'artikel zurzeit vergriffen',
'available for back order',
'backordered',
'brak w magazynie',
'brak na stanie',
'coming soon',
'currently unavailable',
'en rupture de stock',
'as soon as stock is available',
'message if back in stock',
'nachricht bei',
'nicht auf lager',
'nicht lieferbar',
'nicht zur verfügung',
'no disponible temporalmente',
'not available',
'not in stock',
'out of stock',
'out-of-stock',
'não estamos a aceitar encomendas',
'produkt niedostępny',
'no longer in stock',
'sold out',
'temporarily out of stock',
'temporarily unavailable',
'we do not currently have an estimate of when this product will be back in stock.',
'zur zeit nicht an lager',
];
const elementsWithZeroChildren = Array.from(document.getElementsByTagName('*')).filter(element => element.children.length === 0);
for (let i = elementsWithZeroChildren.length - 1; i >= 0; i--) {
const element = elementsWithZeroChildren[i];
if (element.offsetWidth > 0 || element.offsetHeight > 0 || element.getClientRects().length > 0) {
var elementText="";
if (element.tagName.toLowerCase() === "input") {
elementText = element.value.toLowerCase();
} else {
elementText = element.textContent.toLowerCase();
}
for (const outOfStockText of outOfStockTexts) {
if (elementText.includes(outOfStockText)) {
return elementText; // item is out of stock
}
}
}
}
return 'Possibly in stock'; // possibly in stock, can't decide otherwise.
}
// returns the element text that makes it think it's out of stock
return isItemInStock();
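The @todo above asks for this keyword list to be shared with the non-JS fetchers; a rough Python sketch of the same check applied to already-extracted page text could look like this (the list is abbreviated and the function name is illustrative):
OUT_OF_STOCK_TEXTS = ['0 in stock', 'agotado', 'out of stock', 'sold out']  # abbreviated, same idea as the list above

def detect_out_of_stock(page_text):
    text = page_text.lower()
    for phrase in OUT_OF_STOCK_TEXTS:
        if phrase in text:
            return phrase  # looks out of stock
    return 'Possibly in stock'  # same fallback value the JS returns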

View File

@@ -1,6 +1,3 @@
// Copyright (C) 2021 Leigh Morresi (dgtlmoon@gmail.com)
// All rights reserved.
// @file Scrape the page looking for elements of concern (%ELEMENTS%)
// http://matatk.agrip.org.uk/tests/position-and-width/
// https://stackoverflow.com/questions/26813480/when-is-element-getboundingclientrect-guaranteed-to-be-updated-accurate
@@ -92,8 +89,8 @@ for (var i = 0; i < elements.length; i++) {
continue
}
// Skip really small ones, and where width or height ==0
if (bbox['width'] * bbox['height'] < 100) {
// Forget really small ones
if (bbox['width'] < 10 && bbox['height'] < 10) {
continue;
}
@@ -149,6 +146,7 @@ for (var i = 0; i < elements.length; i++) {
}
// Inject the current one set in the include_filters, which may be a CSS rule
// used for displaying the current one in VisualSelector, where it's not one we generated.
if (include_filters.length) {
@@ -207,9 +205,5 @@ if (include_filters.length) {
}
}
// Sort the elements so we find the smallest one first, in other words, we find the smallest one matching in that area
// so that we don't select the wrapping element by mistake and be unable to select what we want
size_pos.sort((a, b) => (a.width*a.height > b.width*b.height) ? 1 : -1)
// Window.width required for proper scaling in the frontend
return {'size_pos': size_pos, 'browser_width': window.innerWidth};
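The sort above orders candidate bounding boxes by area so the smallest (most specific) element is matched first; in Python the equivalent ordering would simply be (hypothetical data, keys mirroring the size_pos entries):
size_pos = [
    {'xpath': '//div[1]', 'width': 800, 'height': 600},
    {'xpath': '//div[1]/p[2]', 'width': 200, 'height': 40},
]
# Smallest area first, mirroring the JS comparator above
size_pos.sort(key=lambda el: el['width'] * el['height'])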

View File

@@ -0,0 +1,104 @@
#!/bin/bash
# live_server will throw errors even with live_server_scope=function if I have the live_server setup in different functions
# and I like to restart the server for each test (and have the test cleanup after each test)
# merge request welcome :)
# exit when any command fails
set -e
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
find tests/test_*py -type f|while read test_name
do
echo "TEST RUNNING $test_name"
pytest $test_name
done
echo "RUNNING WITH BASE_URL SET"
# Now re-run some tests with BASE_URL enabled
# Re #65 - Ability to include a link back to the installation, in the notification.
export BASE_URL="https://really-unique-domain.io"
pytest tests/test_notification.py
# Re-run with HIDE_REFERER set - could affect login
export HIDE_REFERER=True
pytest tests/test_access_control.py
# Now for the selenium and playwright/browserless fetchers
# Note - this is not UI functional tests - just checking that each one can fetch the content
echo "TESTING WEBDRIVER FETCH > SELENIUM/WEBDRIVER..."
docker run -d --name $$-test_selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome-debug:3.141.59
# takes a while to spin up
sleep 5
export WEBDRIVER_URL=http://localhost:4444/wd/hub
pytest tests/fetchers/test_content.py
pytest tests/test_errorhandling.py
unset WEBDRIVER_URL
docker kill $$-test_selenium
echo "TESTING WEBDRIVER FETCH > PLAYWRIGHT/BROWSERLESS..."
# Not all platforms support playwright (not ARM/rPI), so it's not packaged in requirements.txt
PLAYWRIGHT_VERSION=$(grep -i -E "RUN pip install.+" "$SCRIPT_DIR/../Dockerfile" | grep --only-matching -i -E "playwright[=><~+]+[0-9\.]+")
echo "using $PLAYWRIGHT_VERSION"
pip3 install "$PLAYWRIGHT_VERSION"
docker run -d --name $$-test_browserless -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.53-chrome-stable
# takes a while to spin up
sleep 5
export PLAYWRIGHT_DRIVER_URL=ws://127.0.0.1:3000
pytest tests/fetchers/test_content.py
pytest tests/test_errorhandling.py
pytest tests/visualselector/test_fetch_data.py
unset PLAYWRIGHT_DRIVER_URL
docker kill $$-test_browserless
# Test proxy list handling, starting two squids on different ports
# Each squid adds a different header to the response, which is the main thing we test for.
docker run -d --name $$-squid-one --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf -p 3128:3128 ubuntu/squid:4.13-21.10_edge
docker run -d --name $$-squid-two --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf -p 3129:3128 ubuntu/squid:4.13-21.10_edge
# So, basic HTTP as env var test
export HTTP_PROXY=http://localhost:3128
export HTTPS_PROXY=http://localhost:3128
pytest tests/proxy_list/test_proxy.py
docker logs $$-squid-one 2>/dev/null|grep one.changedetection.io
if [ $? -ne 0 ]
then
echo "Did not see a request to one.changedetection.io in the squid logs (while checking env vars HTTP_PROXY/HTTPS_PROXY)"
fi
unset HTTP_PROXY
unset HTTPS_PROXY
# 2nd test actually chooses the preferred proxy from proxies.json
cp tests/proxy_list/proxies.json-example ./test-datastore/proxies.json
# Makes a watch use a preferred proxy
pytest tests/proxy_list/test_multiple_proxy.py
# Should be a request in the default "first" squid
docker logs $$-squid-one 2>/dev/null|grep chosen.changedetection.io
if [ $? -ne 0 ]
then
echo "Did not see a request to chosen.changedetection.io in the squid logs (while checking preferred proxy)"
fi
# And one in the 'second' squid (user selects this as preferred)
docker logs $$-squid-two 2>/dev/null|grep chosen.changedetection.io
if [ $? -ne 0 ]
then
echo "Did not see a request to chosen.changedetection.io in the squid logs (while checking preferred proxy)"
fi
# @todo - test system override proxy selection and watch defaults, setup a 3rd squid?
docker kill $$-squid-one
docker kill $$-squid-two

View File

@@ -1,38 +0,0 @@
#!/bin/bash
# live_server will throw errors even with live_server_scope=function if I have the live_server setup in different functions
# and I like to restart the server for each test (and have the test cleanup after each test)
# merge request welcome :)
# exit when any command fails
set -e
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
find tests/test_*py -type f|while read test_name
do
echo "TEST RUNNING $test_name"
pytest $test_name
done
echo "RUNNING WITH BASE_URL SET"
# Now re-run some tests with BASE_URL enabled
# Re #65 - Ability to include a link back to the installation, in the notification.
export BASE_URL="https://really-unique-domain.io"
pytest tests/test_notification.py
# Re-run with HIDE_REFERER set - could affect login
export HIDE_REFERER=True
pytest tests/test_access_control.py
# Re-run a few tests that will trigger brotli based storage
export SNAPSHOT_BROTLI_COMPRESSION_THRESHOLD=5
pytest tests/test_access_control.py
pytest tests/test_notification.py
pytest tests/test_backend.py
pytest tests/test_rss.py
pytest tests/test_unique_lines.py

View File

@@ -1,61 +0,0 @@
#!/bin/bash
# exit when any command fails
set -e
# Test proxy list handling, starting two squids on different ports
# Each squid adds a different header to the response, which is the main thing we test for.
docker run --network changedet-network -d --name squid-one --hostname squid-one --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf ubuntu/squid:4.13-21.10_edge
docker run --network changedet-network -d --name squid-two --hostname squid-two --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf ubuntu/squid:4.13-21.10_edge
# Used for configuring a custom proxy URL via the UI
docker run --network changedet-network -d \
--name squid-custom \
--hostname squid-custom \
--rm \
-v `pwd`/tests/proxy_list/squid-auth.conf:/etc/squid/conf.d/debian.conf \
-v `pwd`/tests/proxy_list/squid-passwords.txt:/etc/squid3/passwords \
ubuntu/squid:4.13-21.10_edge
## 2nd test actually chooses the preferred proxy from proxies.json
docker run --network changedet-network \
-v `pwd`/tests/proxy_list/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json \
test-changedetectionio \
bash -c 'cd changedetectionio && pytest tests/proxy_list/test_multiple_proxy.py'
## Should be a request in the default "first" squid
docker logs squid-one 2>/dev/null|grep chosen.changedetection.io
if [ $? -ne 0 ]
then
echo "Did not see a request to chosen.changedetection.io in the squid logs (while checking preferred proxy - squid one)"
exit 1
fi
# And one in the 'second' squid (user selects this as preferred)
docker logs squid-two 2>/dev/null|grep chosen.changedetection.io
if [ $? -ne 0 ]
then
echo "Did not see a request to chosen.changedetection.io in the squid logs (while checking preferred proxy - squid two)"
exit 1
fi
# Test the UI configurable proxies
docker run --network changedet-network \
test-changedetectionio \
bash -c 'cd changedetectionio && pytest tests/proxy_list/test_select_custom_proxy.py'
# Should see a request for one.changedetection.io in there
docker logs squid-custom 2>/dev/null|grep "TCP_TUNNEL.200.*changedetection.io"
if [ $? -ne 0 ]
then
echo "Did not see a valid request to changedetection.io in the squid logs (while checking preferred proxy - squid two)"
exit 1
fi
docker kill squid-one squid-two squid-custom

Binary image file changed (preview not shown): 22 KiB before, 43 KiB after

View File

@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg xmlns="http://www.w3.org/2000/svg" width="75.320129mm" height="92.604164mm" viewBox="0 0 75.320129 92.604164">
<g transform="translate(53.548057 -183.975276) scale(1.4843)">
<path fill="#ff2116" d="M-29.632812 123.94727c-3.551967 0-6.44336 2.89347-6.44336 6.44531v49.49804c0 3.55185 2.891393 6.44532 6.44336 6.44532H8.2167969c3.5519661 0 6.4433591-2.89335 6.4433591-6.44532v-40.70117s.101353-1.19181-.416015-2.35156c-.484969-1.08711-1.275391-1.84375-1.275391-1.84375a1.0584391 1.0584391 0 0 0-.0059-.008l-9.3906254-9.21094a1.0584391 1.0584391 0 0 0-.015625-.0156s-.8017392-.76344-1.9902344-1.27344c-1.39939552-.6005-2.8417968-.53711-2.8417968-.53711l.021484-.002z" color="#000" font-family="sans-serif" overflow="visible" paint-order="markers fill stroke" style="line-height:normal;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;text-transform:none;text-orientation:mixed;white-space:normal;shape-padding:0;isolation:auto;mix-blend-mode:normal;solid-color:#000000;solid-opacity:1"/>
<path fill="#f5f5f5" d="M-29.632812 126.06445h28.3789058a1.0584391 1.0584391 0 0 0 .021484 0s1.13480448.011 1.96484378.36719c.79889772.34282 1.36536982.86176 1.36914062.86524.0000125.00001.00391.004.00391.004l9.3671868 9.18945s.564354.59582.837891 1.20899c.220779.49491.234375 1.40039.234375 1.40039a1.0584391 1.0584391 0 0 0-.002.0449v40.74609c0 2.41592-1.910258 4.32813-4.3261717 4.32813H-29.632812c-2.415914 0-4.326172-1.91209-4.326172-4.32813v-49.49804c0-2.41603 1.910258-4.32813 4.326172-4.32813z" color="#000" font-family="sans-serif" overflow="visible" paint-order="markers fill stroke" style="line-height:normal;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;text-transform:none;text-orientation:mixed;white-space:normal;shape-padding:0;isolation:auto;mix-blend-mode:normal;solid-color:#000000;solid-opacity:1"/>
<path fill="#ff2116" d="M-23.40766 161.09299c-1.45669-1.45669.11934-3.45839 4.39648-5.58397l2.69124-1.33743 1.04845-2.29399c.57665-1.26169 1.43729-3.32036 1.91254-4.5748l.8641-2.28082-.59546-1.68793c-.73217-2.07547-.99326-5.19438-.52872-6.31588.62923-1.51909 2.69029-1.36323 3.50626.26515.63727 1.27176.57212 3.57488-.18329 6.47946l-.6193 2.38125.5455.92604c.30003.50932 1.1764 1.71867 1.9475 2.68743l1.44924 1.80272 1.8033728-.23533c5.72900399-.74758 7.6912472.523 7.6912472 2.34476 0 2.29921-4.4984914 2.48899-8.2760865-.16423-.8499666-.59698-1.4336605-1.19001-1.4336605-1.19001s-2.3665326.48178-3.531704.79583c-1.202707.32417-1.80274.52719-3.564509 1.12186 0 0-.61814.89767-1.02094 1.55026-1.49858 2.4279-3.24833 4.43998-4.49793 5.1723-1.3991.81993-2.86584.87582-3.60433.13733zm2.28605-.81668c.81883-.50607 2.47616-2.46625 3.62341-4.28553l.46449-.73658-2.11497 1.06339c-3.26655 1.64239-4.76093 3.19033-3.98386 4.12664.43653.52598.95874.48237 2.01093-.16792zm21.21809-5.95578c.80089-.56097.68463-1.69142-.22082-2.1472-.70466-.35471-1.2726074-.42759-3.1031574-.40057-1.1249.0767-2.9337647.3034-3.2403347.37237 0 0 .993716.68678 1.434896.93922.58731.33544 2.0145161.95811 3.0565161 1.27706 1.02785.31461 1.6224.28144 2.0729-.0409zm-8.53152-3.54594c-.4847-.50952-1.30889-1.57296-1.83152-2.3632-.68353-.89643-1.02629-1.52887-1.02629-1.52887s-.4996 1.60694-.90948 2.57394l-1.27876 3.16076-.37075.71695s1.971043-.64627 2.97389-.90822c1.0621668-.27744 3.21787-.70134 3.21787-.70134zm-2.74938-11.02573c.12363-1.0375.1761-2.07346-.15724-2.59587-.9246-1.01077-2.04057-.16787-1.85154 2.23517.0636.8084.26443 2.19033.53292 3.04209l.48817 1.54863.34358-1.16638c.18897-.64151.47882-2.02015.64411-3.06364z"/>
<path fill="#2c2c2c" d="M-20.930423 167.83862h2.364986q1.133514 0 1.840213.2169.706698.20991 1.189489.9446.482795.72769.482795 1.75625 0 .94459-.391832 1.6233-.391833.67871-1.056548.97958-.65772.30087-2.02913.30087h-.818651v3.72941h-1.581322zm1.581322 1.22447v3.33058h.783664q1.049552 0 1.44838-.39184.405826-.39183.405826-1.27345 0-.65772-.265887-1.06355-.265884-.41282-.587747-.50378-.314866-.098-1.000572-.098zm5.50664-1.22447h2.148082q1.560333 0 2.4909318.55276.9375993.55276 1.4133973 1.6443.482791 1.09153.482791 2.42096 0 1.3994-.4338151 2.49793-.4268149 1.09153-1.3154348 1.76324-.8816233.67172-2.5189212.67172h-2.267031zm1.581326 1.26645v7.018h.657715q1.378411 0 2.001144-.9516.6227329-.95858.6227329-2.5539 0-3.5125-2.6238769-3.5125zm6.4722254-1.26645h5.30372941v1.26645H-4.2075842v2.85478h2.9807225v1.26646h-2.9807225v4.16322h-1.5813254z" font-family="Franklin Gothic Medium Cond" letter-spacing="0" style="line-height:125%;-inkscape-font-specification:'Franklin Gothic Medium Cond'" word-spacing="4.26000023"/>
</g>
</svg>

Image preview not shown (before: 5.0 KiB)

View File

@@ -1,5 +1,4 @@
// Copyright (C) 2021 Leigh Morresi (dgtlmoon@gmail.com)
// All rights reserved.
// Horrible proof of concept code :)
// yes - this is really a hack, if you are a front-ender and want to help, please get in touch!
$(document).ready(function () {
@@ -178,10 +177,9 @@ $(document).ready(function () {
// Basically, find the most 'deepest'
var found = 0;
ctx.fillStyle = 'rgba(205,0,0,0.35)';
// Will be sorted by smallest width*height first
for (var i = 0; i <= selector_data['size_pos'].length; i++) {
for (var i = selector_data['size_pos'].length; i !== 0; i--) {
// draw all of them? let them choose somehow?
var sel = selector_data['size_pos'][i];
var sel = selector_data['size_pos'][i - 1];
// If we are in a bounding-box
if (e.offsetY > sel.top * y_scale && e.offsetY < sel.top * y_scale + sel.height * y_scale
&&
@@ -197,7 +195,7 @@ $(document).ready(function () {
// no need to keep digging
// @todo or, O to go out/up, I to go in
// or double click to go up/out the selector?
current_selected_i = i;
current_selected_i = i - 1;
found += 1;
break;
}

View File

@@ -1,7 +1,7 @@
$(document).ready(function () {
$(document).ready(function() {
function toggle() {
if ($('input[name="fetch_backend"]:checked').val() == 'html_webdriver') {
if (playwright_enabled) {
if(playwright_enabled) {
// playwright supports headers, so hide everything else
// See #664
$('#requests-override-options #request-method').hide();
@@ -14,14 +14,9 @@ $(document).ready(function () {
$('#requests-override-options').hide();
}
$('#webdriver-override-options').show();
} else if ($('input[name="fetch_backend"]:checked').val() == 'system') {
$('#requests-override-options #request-method').hide();
$('#requests-override-options #request-body').hide();
$('#ignore-status-codes-option').hide();
$('#requests-override-options').hide();
$('#webdriver-override-options').hide();
} else {
$('#requests-override-options').show();

View File

@@ -1,3 +0,0 @@
node_modules
package-lock.json

View File

@@ -1,17 +0,0 @@
ul#requests-extra_proxies {
list-style: none;
/* tidy up the table to look more "inline" */
li {
> label {
display: none;
}
}
/* each proxy entry is a `table` */
table {
tr {
display: inline;
}
}
}

View File

@@ -2,11 +2,10 @@
* -- BASE STYLES --
*/
@import "parts/_arrows";
@import "parts/_browser-steps";
@import "parts/_extra_proxies";
@import "parts/_spinners";
@import "parts/_variables";
@import "parts/_spinners";
@import "parts/_browser-steps";
@import "parts/_arrows";
body {
color: var(--color-text);
@@ -23,13 +22,6 @@ body {
width: 1px;
}
// Row icons like chrome, pdf, share, etc
.status-icon {
display: inline-block;
height: 1rem;
vertical-align: middle;
}
.pure-table-even {
background: var(--color-background);
}
@@ -241,10 +233,6 @@ body:before {
font-size: 85%;
}
.button-xsmall {
font-size: 70%;
}
.fetch-error {
padding-top: 1em;
font-size: 80%;
@@ -893,21 +881,6 @@ body.full-width {
font-size: .875em;
}
}
.text-filtering {
h3 {
margin-top: 0;
}
border: 1px solid #ccc;
padding: 1rem;
border-radius: 5px;
margin-bottom: 1rem;
fieldset:last-of-type {
padding-bottom: 0;
.pure-control-group {
padding-bottom: 0;
}
}
}
}
ul {
@@ -1063,30 +1036,3 @@ ul {
vertical-align: middle;
}
#quick-watch-processor-type {
color: #fff;
ul {
padding: 0.3rem;
li {
list-style: none;
font-size: 0.8rem;
}
}
}
.restock-label {
&.in-stock {
background-color: var(--color-background-button-green);
color: #fff;
}
&.not-in-stock {
background-color: var(--color-background-button-cancel);
color: #777;
}
padding: 3px;
border-radius: 3px;
white-space: nowrap;
}

View File

@@ -1,139 +1,6 @@
/*
* -- BASE STYLES --
*/
.arrow {
border: solid #1b98f8;
border-width: 0 2px 2px 0;
display: inline-block;
padding: 3px; }
.arrow.right {
transform: rotate(-45deg);
-webkit-transform: rotate(-45deg); }
.arrow.left {
transform: rotate(135deg);
-webkit-transform: rotate(135deg); }
.arrow.up, .arrow.asc {
transform: rotate(-135deg);
-webkit-transform: rotate(-135deg); }
.arrow.down, .arrow.desc {
transform: rotate(45deg);
-webkit-transform: rotate(45deg); }
#browser_steps {
/* convert rows to horizontal cells */ }
#browser_steps th {
display: none; }
#browser_steps li {
list-style: decimal;
padding: 5px; }
#browser_steps li:not(:first-child):hover {
opacity: 1.0; }
#browser_steps li .control {
padding-left: 5px;
padding-right: 5px; }
#browser_steps li .control a {
font-size: 70%; }
#browser_steps li.empty {
padding: 0px;
opacity: 0.35; }
#browser_steps li.empty .control {
display: none; }
#browser_steps li:hover {
background: #eee; }
#browser_steps li > label {
display: none; }
#browser-steps-fieldlist {
height: 100%;
overflow-y: scroll; }
#browser-steps .flex-wrapper {
display: flex;
flex-flow: row;
height: 600px;
/*@todo make this dynamic */ }
/* this is duplicate :( */
#browsersteps-selector-wrapper {
height: 100%;
width: 100%;
overflow-y: scroll;
position: relative;
/* nice tall skinny one */ }
#browsersteps-selector-wrapper > img {
position: absolute;
max-width: 100%; }
#browsersteps-selector-wrapper > canvas {
position: relative;
max-width: 100%; }
#browsersteps-selector-wrapper > canvas:hover {
cursor: pointer; }
#browsersteps-selector-wrapper .loader {
position: absolute;
left: 50%;
top: 50%;
transform: translate(-50%, -50%);
margin-left: -40px;
z-index: 100;
max-width: 350px;
text-align: center; }
#browsersteps-selector-wrapper .spinner, #browsersteps-selector-wrapper .spinner:after {
width: 80px;
height: 80px;
font-size: 3px; }
#browsersteps-selector-wrapper #browsersteps-click-start {
color: var(--color-grey-400); }
#browsersteps-selector-wrapper #browsersteps-click-start:hover {
cursor: pointer; }
ul#requests-extra_proxies {
list-style: none;
/* tidy up the table to look more "inline" */
/* each proxy entry is a `table` */ }
ul#requests-extra_proxies li > label {
display: none; }
ul#requests-extra_proxies table tr {
display: inline; }
/* spinner */
.spinner,
.spinner:after {
border-radius: 50%;
width: 10px;
height: 10px; }
.spinner {
margin: 0px auto;
font-size: 3px;
vertical-align: middle;
display: inline-block;
text-indent: -9999em;
border-top: 1.1em solid rgba(38, 104, 237, 0.2);
border-right: 1.1em solid rgba(38, 104, 237, 0.2);
border-bottom: 1.1em solid rgba(38, 104, 237, 0.2);
border-left: 1.1em solid #2668ed;
-webkit-transform: translateZ(0);
-ms-transform: translateZ(0);
transform: translateZ(0);
-webkit-animation: load8 1.1s infinite linear;
animation: load8 1.1s infinite linear; }
@-webkit-keyframes load8 {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg); }
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg); } }
@keyframes load8 {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg); }
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg); } }
/**
* CSS custom properties (aka variables).
*/
@@ -271,6 +138,130 @@ html[data-darkmode="true"] {
html[data-darkmode="true"] .watch-table .unviewed.error {
color: var(--color-watch-table-error); }
/* spinner */
.spinner,
.spinner:after {
border-radius: 50%;
width: 10px;
height: 10px; }
.spinner {
margin: 0px auto;
font-size: 3px;
vertical-align: middle;
display: inline-block;
text-indent: -9999em;
border-top: 1.1em solid rgba(38, 104, 237, 0.2);
border-right: 1.1em solid rgba(38, 104, 237, 0.2);
border-bottom: 1.1em solid rgba(38, 104, 237, 0.2);
border-left: 1.1em solid #2668ed;
-webkit-transform: translateZ(0);
-ms-transform: translateZ(0);
transform: translateZ(0);
-webkit-animation: load8 1.1s infinite linear;
animation: load8 1.1s infinite linear; }
@-webkit-keyframes load8 {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg); }
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg); } }
@keyframes load8 {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg); }
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg); } }
#browser_steps {
/* convert rows to horizontal cells */ }
#browser_steps th {
display: none; }
#browser_steps li {
list-style: decimal;
padding: 5px; }
#browser_steps li:not(:first-child):hover {
opacity: 1.0; }
#browser_steps li .control {
padding-left: 5px;
padding-right: 5px; }
#browser_steps li .control a {
font-size: 70%; }
#browser_steps li.empty {
padding: 0px;
opacity: 0.35; }
#browser_steps li.empty .control {
display: none; }
#browser_steps li:hover {
background: #eee; }
#browser_steps li > label {
display: none; }
#browser-steps-fieldlist {
height: 100%;
overflow-y: scroll; }
#browser-steps .flex-wrapper {
display: flex;
flex-flow: row;
height: 600px;
/*@todo make this dynamic */ }
/* this is duplicate :( */
#browsersteps-selector-wrapper {
height: 100%;
width: 100%;
overflow-y: scroll;
position: relative;
/* nice tall skinny one */ }
#browsersteps-selector-wrapper > img {
position: absolute;
max-width: 100%; }
#browsersteps-selector-wrapper > canvas {
position: relative;
max-width: 100%; }
#browsersteps-selector-wrapper > canvas:hover {
cursor: pointer; }
#browsersteps-selector-wrapper .loader {
position: absolute;
left: 50%;
top: 50%;
transform: translate(-50%, -50%);
margin-left: -40px;
z-index: 100;
max-width: 350px;
text-align: center; }
#browsersteps-selector-wrapper .spinner, #browsersteps-selector-wrapper .spinner:after {
width: 80px;
height: 80px;
font-size: 3px; }
#browsersteps-selector-wrapper #browsersteps-click-start {
color: var(--color-grey-400); }
#browsersteps-selector-wrapper #browsersteps-click-start:hover {
cursor: pointer; }
.arrow {
border: solid #1b98f8;
border-width: 0 2px 2px 0;
display: inline-block;
padding: 3px; }
.arrow.right {
transform: rotate(-45deg);
-webkit-transform: rotate(-45deg); }
.arrow.left {
transform: rotate(135deg);
-webkit-transform: rotate(135deg); }
.arrow.up, .arrow.asc {
transform: rotate(-135deg);
-webkit-transform: rotate(-135deg); }
.arrow.down, .arrow.desc {
transform: rotate(45deg);
-webkit-transform: rotate(45deg); }
body {
color: var(--color-text);
background: var(--color-background-page); }
@@ -284,11 +275,6 @@ body {
white-space: nowrap;
width: 1px; }
.status-icon {
display: inline-block;
height: 1rem;
vertical-align: middle; }
.pure-table-even {
background: var(--color-background); }
@@ -432,9 +418,6 @@ body:before {
.button-small {
font-size: 85%; }
.button-xsmall {
font-size: 70%; }
.fetch-error {
padding-top: 1em;
font-size: 80%;
@@ -872,17 +855,6 @@ body.full-width .edit-form {
color: var(--color-text-input-description); }
.edit-form .pure-form-message-inline code {
font-size: .875em; }
.edit-form .text-filtering {
border: 1px solid #ccc;
padding: 1rem;
border-radius: 5px;
margin-bottom: 1rem; }
.edit-form .text-filtering h3 {
margin-top: 0; }
.edit-form .text-filtering fieldset:last-of-type {
padding-bottom: 0; }
.edit-form .text-filtering fieldset:last-of-type .pure-control-group {
padding-bottom: 0; }
ul {
padding-left: 1em;
@@ -994,22 +966,3 @@ ul {
display: inline-block;
height: 0.8rem;
vertical-align: middle; }
#quick-watch-processor-type {
color: #fff; }
#quick-watch-processor-type ul {
padding: 0.3rem; }
#quick-watch-processor-type ul li {
list-style: none;
font-size: 0.8rem; }
.restock-label {
padding: 3px;
border-radius: 3px;
white-space: nowrap; }
.restock-label.in-stock {
background-color: var(--color-background-button-green);
color: #fff; }
.restock-label.not-in-stock {
background-color: var(--color-background-button-cancel);
color: #777; }

View File

@@ -1,20 +1,20 @@
from flask import (
flash
)
from . model import App, Watch
from copy import deepcopy
from os import path, unlink
from threading import Lock
import json
import logging
import os
import re
import requests
import secrets
import threading
import time
import uuid as uuid_builder
from copy import deepcopy
from os import path, unlink
from threading import Lock
import re
import requests
import secrets
from . model import App, Watch
# Is there an existing library to ensure some data store (JSON etc) is in sync with CRUD methods?
# Open a github issue if you know something :)
@@ -36,6 +36,7 @@ class ChangeDetectionStore:
self.datastore_path = datastore_path
self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
self.needs_write = False
self.proxy_list = None
self.start_time = time.time()
self.stop_thread = False
# Base definition for all watchers
@@ -77,10 +78,10 @@ class ChangeDetectionStore:
self.__data['watching'][uuid] = Watch.model(datastore_path=self.datastore_path, default=watch)
print("Watching:", uuid, self.__data['watching'][uuid]['url'])
# First time ran, Create the datastore.
except (FileNotFoundError):
# First time ran, doesn't exist.
except (FileNotFoundError, json.decoder.JSONDecodeError):
if include_default_watches:
print("No JSON DB found at {}, creating JSON store at {}".format(self.json_store_path, self.datastore_path))
print("Creating JSON store at", self.datastore_path)
self.add_watch(url='https://news.ycombinator.com/',
tag='Tech news',
extras={'fetch_backend': 'html_requests'})
@@ -88,10 +89,8 @@ class ChangeDetectionStore:
self.add_watch(url='https://changedetection.io/CHANGELOG.txt',
tag='changedetection.io',
extras={'fetch_backend': 'html_requests'})
self.__data['version_tag'] = version_tag
# Just to test that proxies.json, if it exists, doesn't throw a parsing error on startup
test_list = self.proxy_list
self.__data['version_tag'] = version_tag
# Helper to remove password protection
password_reset_lockfile = "{}/removepassword.lock".format(self.datastore_path)
@@ -117,6 +116,11 @@ class ChangeDetectionStore:
secret = secrets.token_hex(16)
self.__data['settings']['application']['api_access_token'] = secret
# Proxy list support - available as a selection in settings when text file is imported
proxy_list_file = "{}/proxies.json".format(self.datastore_path)
if path.isfile(proxy_list_file):
self.import_proxy_list(proxy_list_file)
# Bump the update version by running updates
self.run_updates()
@@ -171,6 +175,14 @@ class ChangeDetectionStore:
@property
def data(self):
has_unviewed = False
for uuid, watch in self.__data['watching'].items():
# #106 - Be sure this is None on empty string, False, None, etc
# Default var for fetch_backend
# @todo this may not be needed anymore, or could be easily removed
if not self.__data['watching'][uuid]['fetch_backend']:
self.__data['watching'][uuid]['fetch_backend'] = self.__data['settings']['application']['fetch_backend']
# Re #152, Return env base_url if not overridden, @todo also prefer the proxy pass url
env_base_url = os.getenv('BASE_URL','')
if not self.__data['settings']['application']['base_url']:
@@ -192,24 +204,27 @@ class ChangeDetectionStore:
tags.sort()
return tags
def unlink_history_file(self, path):
try:
unlink(path)
except (FileNotFoundError, IOError):
pass
# Delete a single watch by UUID
def delete(self, uuid):
import pathlib
import shutil
with self.lock:
if uuid == 'all':
self.__data['watching'] = {}
# GitHub #30 also delete history records
for uuid in self.data['watching']:
path = pathlib.Path(os.path.join(self.datastore_path, uuid))
shutil.rmtree(path)
self.needs_write_urgent = True
for path in self.data['watching'][uuid].history.values():
self.unlink_history_file(path)
else:
path = pathlib.Path(os.path.join(self.datastore_path, uuid))
shutil.rmtree(path)
for path in self.data['watching'][uuid].history.values():
self.unlink_history_file(path)
del self.data['watching'][uuid]
self.needs_write_urgent = True
@@ -287,7 +302,6 @@ class ChangeDetectionStore:
'method',
'paused',
'previous_md5',
'processor',
'subtractive_selectors',
'tag',
'text_should_not_be_present',
@@ -307,12 +321,9 @@ class ChangeDetectionStore:
logging.error("Error fetching metadata for shared watch link", url, str(e))
flash("Error fetching metadata for {}".format(url), 'error')
return False
from .model.Watch import is_safe_url
if not is_safe_url(url):
flash('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX', 'error')
return None
with self.lock:
# #Re 569
new_watch = Watch.model(datastore_path=self.datastore_path, default={
'url': url,
@@ -361,6 +372,11 @@ class ChangeDetectionStore:
f.write(screenshot)
f.close()
# Make a JPEG that's used in notifications (due to being a smaller size) available
from PIL import Image
im1 = Image.open(target_path)
im1.convert('RGB').save(target_path.replace('.png','.jpg'), quality=int(os.getenv("NOTIFICATION_SCREENSHOT_JPG_QUALITY", 75)))
def save_error_text(self, watch_uuid, contents):
if not self.data['watching'].get(watch_uuid):
@@ -447,30 +463,10 @@ class ChangeDetectionStore:
print ("Removing",item)
unlink(item)
@property
def proxy_list(self):
proxy_list = {}
proxy_list_file = os.path.join(self.datastore_path, 'proxies.json')
# Load from external config file
if path.isfile(proxy_list_file):
with open("{}/proxies.json".format(self.datastore_path)) as f:
proxy_list = json.load(f)
# Mapping from UI config if available
extras = self.data['settings']['requests'].get('extra_proxies')
if extras:
i=0
for proxy in extras:
i += 1  # increment per entry so each UI-added proxy gets a unique key
if proxy.get('proxy_name') and proxy.get('proxy_url'):
k = "ui-" + str(i) + proxy.get('proxy_name')
proxy_list[k] = {'label': proxy.get('proxy_name'), 'url': proxy.get('proxy_url')}
return proxy_list if len(proxy_list) else None
def import_proxy_list(self, filename):
with open(filename) as f:
self.proxy_list = json.load(f)
print ("Registered proxy list", list(self.proxy_list.keys()))
def get_preferred_proxy_for_watch(self, uuid):
@@ -480,10 +476,11 @@ class ChangeDetectionStore:
:return: proxy "key" id
"""
proxy_id = None
if self.proxy_list is None:
return None
# If it's a valid one
# If its a valid one
watch = self.data['watching'].get(uuid)
if watch.get('proxy') and watch.get('proxy') in list(self.proxy_list.keys()):
@@ -496,9 +493,8 @@ class ChangeDetectionStore:
if self.proxy_list.get(system_proxy_id):
return system_proxy_id
# Fallback - Did not resolve anything, or doesn't exist, use the first available
if system_proxy_id is None or not self.proxy_list.get(system_proxy_id):
# Fallback - Did not resolve anything, use the first available
if system_proxy_id is None:
first_default = list(self.proxy_list)[0]
return first_default
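In short, the preferred-proxy lookup above resolves in this order: the proxy set on the watch, then the system default, then the first entry in the list. A compact sketch of that fallback, assuming proxy_list is the dict built from proxies.json plus the UI extras:
def preferred_proxy(proxy_list, watch_proxy=None, system_proxy_id=None):
    if not proxy_list:
        return None
    if watch_proxy and watch_proxy in proxy_list:
        return watch_proxy              # per-watch override wins
    if system_proxy_id and system_proxy_id in proxy_list:
        return system_proxy_id          # then the system-wide setting
    return next(iter(proxy_list))       # otherwise fall back to the first available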
@@ -669,13 +665,3 @@ class ChangeDetectionStore:
self.data['settings']['application']['notification_urls'][i] = re.sub(r, r'{{\1}}', url)
return
# Some setups may have missed the correct default, so it shows the wrong config in the UI, although it will default to system-wide
def update_10(self):
for uuid, watch in self.data['watching'].items():
try:
if not watch.get('fetch_backend', ''):
watch['fetch_backend'] = 'system'
except:
continue
return

View File

@@ -17,15 +17,14 @@
<li><code>tgram://</code> bots can't send messages to other bots, so you should specify the chat ID of a non-bot user.</li>
<li><code>tgram://</code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li>
<li><code>gets://</code>, <code>posts://</code>, <code>puts://</code>, <code>deletes://</code> for direct API calls (or omit the "<code>s</code>" for non-SSL ie <code>get://</code>)</li>
<li>Accepts the <code>{{ '{{token}}' }}</code> placeholders listed below</li>
</ul>
</div>
<div class="notifications-wrapper">
<a id="send-test-notification" class="pure-button button-secondary button-xsmall" >Send test notification</a>
<a id="send-test-notification" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Send test notification</a>
{% if emailprefix %}
<a id="add-email-helper" class="pure-button button-secondary button-xsmall" >Add email</a>
<a id="add-email-helper" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Add email</a>
{% endif %}
<a href="{{url_for('notification_logs')}}" class="pure-button button-secondary button-xsmall" >Notification debug logs</a>
<a href="{{url_for('notification_logs')}}" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Notification debug logs</a>
</div>
</div>
<div id="notification-customisation" class="pure-control-group">
@@ -56,66 +55,48 @@
</thead>
<tbody>
<tr>
<td><code>{{ '{{base_url}}' }}</code></td>
<td><code>{{ '{{ base_url }}' }}</code></td>
<td>The URL of the changedetection.io instance you are running.</td>
</tr>
<tr>
<td><code>{{ '{{watch_url}}' }}</code></td>
<td><code>{{ '{{ watch_url }}' }}</code></td>
<td>The URL being watched.</td>
</tr>
<tr>
<td><code>{{ '{{watch_uuid}}' }}</code></td>
<td><code>{{ '{{ watch_uuid }}' }}</code></td>
<td>The UUID of the watch.</td>
</tr>
<tr>
<td><code>{{ '{{watch_title}}' }}</code></td>
<td><code>{{ '{{ watch_title }}' }}</code></td>
<td>The title of the watch.</td>
</tr>
<tr>
<td><code>{{ '{{watch_tag}}' }}</code></td>
<td><code>{{ '{{ watch_tag }}' }}</code></td>
<td>The watch label / tag</td>
</tr>
<tr>
<td><code>{{ '{{preview_url}}' }}</code></td>
<td><code>{{ '{{ preview_url }}' }}</code></td>
<td>The URL of the preview page generated by changedetection.io.</td>
</tr>
<tr>
<td><code>{{ '{{diff_url}}' }}</code></td>
<td>The URL of the diff output for the watch.</td>
</tr>
<tr>
<td><code>{{ '{{diff}}' }}</code></td>
<td>The diff output - only changes, additions, and removals</td>
</tr>
<tr>
<td><code>{{ '{{diff_added}}' }}</code></td>
<td>The diff output - only changes and additions</td>
</tr>
<tr>
<td><code>{{ '{{diff_removed}}' }}</code></td>
<td>The diff output - only changes and removals</td>
<td><code>{{ '{{ diff_url }}' }}</code></td>
<td>The diff output - differences only</td>
</tr>
<tr>
<td><code>{{ '{{diff_full}}' }}</code></td>
<td><code>{{ '{{ diff_full }}' }}</code></td>
<td>The diff output - full difference output</td>
</tr>
<tr>
<td><code>{{ '{{current_snapshot}}' }}</code></td>
<td><code>{{ '{{ current_snapshot }}' }}</code></td>
<td>The current snapshot value, useful when combined with JSON or CSS filters
</td>
</tr>
<tr>
<td><code>{{ '{{triggered_text}}' }}</code></td>
<td>Text that tripped the trigger from filters</td>
</tr>
</tbody>
</table>
<div class="pure-form-message-inline">
<br>
URLs generated by changedetection.io (such as <code>{{ '{{diff_url}}' }}</code>) require the <code>BASE_URL</code> environment variable set.<br>
URLs generated by changedetection.io (such as <code>{{ '{{ diff_url }}' }}</code>) require the <code>BASE_URL</code> environment variable set.<br/>
Your <code>BASE_URL</code> var is currently "{{settings_application['current_base_url']}}"
<br>
Warning: Contents of <code>{{ '{{diff}}' }}</code>, <code>{{ '{{diff_removed}}' }}</code>, and <code>{{ '{{diff_added}}' }}</code> depend on how the difference algorithm perceives the change. For example, an addition or removal could be perceived as a change in some cases. <a target="_new" href="https://github.com/dgtlmoon/changedetection.io/wiki/Using-the-%7B%7Bdiff%7D%7D,-%7B%7Bdiff_added%7D%7D,-and-%7B%7Bdiff_removal%7D%7D-notification-tokens">More Here</a> </br>
</div>
</div>
</div>
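As a rough illustration of how these placeholders end up in a notification body (assuming the tokens are rendered as Jinja2 variables; the values below are made up):
from jinja2 import Template

body = Template("Change detected on {{watch_url}}\n{{diff}}\nFull diff: {{diff_url}}")
print(body.render(watch_url='https://example.com/product',
                  diff='+ Now in stock',
                  diff_url='http://localhost:5000/diff/example'))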

View File

@@ -76,12 +76,8 @@
</div>
<div class="tab-pane-inner" id="text">
<div class="tip">Pro-tip: Use <strong>show current snapshot</strong> tab to visualise what will be ignored.</div>
{% if password_enabled_and_share_is_off %}
<div class="tip">Pro-tip: You can enable <strong>"share access when password is enabled"</strong> from settings</div>
{% endif %}
<div class="tip">Pro-tip: Use <strong>show current snapshot</strong> tab to visualise what will be ignored.
</div>
<div class="snapshot-age">{{watch_a.snapshot_text_ctime|format_timestamp_timeago}}</div>
<table>
@@ -124,12 +120,12 @@
<div class="pure-control-group">
{{ render_field(extract_form.extract_regex) }}
<span class="pure-form-message-inline">
A <strong>RegEx</strong> is a pattern that identifies exactly which part of the text you want to extract.<br>
A <strong>RegEx</strong> is a pattern that identifies exactly which part of the text you want to extract.<br/>
<p>
For example, to extract only the numbers from text &dash;<br>
<strong>Raw text</strong>: <code>Temperature <span style="color: red">5.5</span>°C in Sydney</code><br>
<strong>RegEx to extract:</strong> <code>Temperature <span style="color: red">([0-9\.]+)</span></code><br>
For example, to extract only the numbers from text &dash;</br>
<strong>Raw text</strong>: <code>Temperature <span style="color: red">5.5</span>°C in Sydney</code></br>
<strong>RegEx to extract:</strong> <code>Temperature <span style="color: red">([0-9\.]+)</span></code><br/>
</p>
<p>
<a href="https://RegExr.com/">Be sure to test your RegEx here.</a>
@@ -154,4 +150,4 @@
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='diff-render.js')}}"></script>
{% endblock %}
{% endblock %}

View File

@@ -34,15 +34,8 @@
{% if playwright_enabled %}
<li class="tab"><a id="browsersteps-tab" href="#browser-steps">Browser Steps</a></li>
{% endif %}
{% if watch['processor'] == 'text_json_diff' %}
<li class="tab"><a id="visualselector-tab" href="#visualselector">Visual Filter Selector</a></li>
<li class="tab"><a href="#filters-and-triggers">Filters &amp; Triggers</a></li>
{% endif %}
{% if watch['processor'] == 'restock_diff' %}
<li class="tab"><a href="#restock">Restock Detection</a></li>
{% endif %}
<li class="tab"><a href="#notifications">Notifications</a></li>
</ul>
</div>
@@ -56,18 +49,8 @@
<fieldset>
<div class="pure-control-group">
{{ render_field(form.url, placeholder="https://...", required=true, class="m-d") }}
<span class="pure-form-message-inline">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></span><br>
<span class="pure-form-message-inline">You can use variables in the URL, perfect for inserting the current date and other logic, <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a></span><br>
<span class="pure-form-message-inline">
{% if watch['processor'] == 'text_json_diff' %}
Current mode: <strong>Webpage Text/HTML, JSON and PDF changes.</strong><br>
<a href="{{url_for('edit_page', uuid=uuid)}}?switch_processor=restock_diff" class="pure-button button-xsmall">Switch to re-stock detection mode.</a>
{% else %}
Current mode: <strong>Re-stock detection.</strong><br>
<a href="{{url_for('edit_page', uuid=uuid)}}?switch_processor=text_json_diff" class="pure-button button-xsmall">Switch to Webpage Text/HTML, JSON and PDF changes mode.</a>
{% endif %}
</span>
<span class="pure-form-message-inline">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></span><br/>
<span class="pure-form-message-inline">You can use variables in the URL, perfect for inserting the current date and other logic, <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a></span><br/>
</div>
<div class="pure-control-group">
{{ render_field(form.title, class="m-d") }}
@@ -123,10 +106,10 @@
{{ render_field(form.webdriver_delay) }}
<div class="pure-form-message-inline">
<strong>If you're having trouble waiting for the page to be fully rendered (text missing etc), try increasing the 'wait' time here.</strong>
<br>
<br/>
This will wait <i>n</i> seconds before extracting the text.
{% if using_global_webdriver_wait %}
<br><strong>Using the current global default settings</strong>
<br/><strong>Using the current global default settings</strong>
{% endif %}
</div>
</div>
@@ -231,10 +214,9 @@ User-Agent: wonderbra 1.0") }}
</fieldset>
</div>
{% if watch['processor'] == 'text_json_diff' %}
<div class="tab-pane-inner" id="filters-and-triggers">
<div class="pure-control-group">
<strong>Pro-tips:</strong><br>
<strong>Pro-tips:</strong><br/>
<ul>
<li>
Use the preview page to see your filters and triggers highlighted.
@@ -244,6 +226,12 @@ User-Agent: wonderbra 1.0") }}
</li>
</ul>
</div>
<fieldset>
<div class="pure-control-group">
{{ render_checkbox_field(form.check_unique_lines) }}
<span class="pure-form-message-inline">Good for websites that just move the content around, and you want to know when NEW content is added, compares new lines against all history for this watch.</span>
</div>
</fieldset>
<div class="pure-control-group">
{% set field = render_field(form.include_filters,
rows=5,
@@ -253,9 +241,9 @@ xpath://body/div/span[contains(@class, 'example-class')]",
%}
{{ field }}
{% if '/text()' in field %}
<span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the &lt;element&gt; contains &lt;![CDATA[]]&gt;</strong></span><br>
<span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the &lt;element&gt; contains &lt;![CDATA[]]&gt;</strong></span><br/>
{% endif %}
<span class="pure-form-message-inline">One rule per line, <i>any</i> rules that matches will be used.<br>
<span class="pure-form-message-inline">One rule per line, <i>any</i> rules that matches will be used.<br/>
<ul>
<li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
@@ -278,42 +266,40 @@ xpath://body/div/span[contains(@class, 'example-class')]",
</li>
</ul>
Please be sure that you thoroughly understand how to write CSS, JSONPath, XPath{% if jq_support %}, or jq selector{%endif%} rules before filing an issue on GitHub! <a
href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br>
href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br/>
</span>
</div>
<fieldset class="pure-control-group">
{{ render_field(form.subtractive_selectors, rows=5, placeholder="header
<div class="pure-control-group">
{{ render_field(form.subtractive_selectors, rows=5, placeholder="header
footer
nav
.stockticker") }}
<span class="pure-form-message-inline">
<span class="pure-form-message-inline">
<ul>
<li> Remove HTML element(s) by CSS selector before text conversion. </li>
<li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li>
</ul>
</span>
</fieldset>
<div class="text-filtering">
<fieldset class="pure-group" id="text-filtering-type-options">
<h3>Text filtering</h3>
Limit trigger/ignore/block/extract to;<br>
{{ render_checkbox_field(form.filter_text_added) }}
{{ render_checkbox_field(form.filter_text_replaced) }}
{{ render_checkbox_field(form.filter_text_removed) }}
<span class="pure-form-message-inline">Note: Depending on the length and similarity of the text on each line, the algorithm may consider an <strong>addition</strong> instead of <strong>replacement</strong> for example.</span>
<span class="pure-form-message-inline">So it's always better to select <strong>Added</strong>+<strong>Replaced</strong> when you're interested in new content.</span><br>
<span class="pure-form-message-inline">When content is merely moved in a list, it will also trigger an <strong>addition</strong>, consider enabling <code><strong>Only trigger when unique lines appear</strong></code></span>
</fieldset>
</div>
<fieldset class="pure-group">
{{ render_field(form.ignore_text, rows=5, placeholder="Some text to ignore in a line
/some.regex\d{2}/ for case-INsensitive regex
") }}
<span class="pure-form-message-inline">
<ul>
<li>Each line processed separately, any line matching will be ignored (removed before creating the checksum)</li>
<li>Regular Expression support, wrap the entire line in forward slash <code>/regex/</code></li>
<li>Changing this will affect the comparison checksum which may trigger an alert</li>
<li>Use the preview/show current tab to see ignores</li>
</ul>
</span>
<fieldset class="pure-control-group">
{{ render_checkbox_field(form.check_unique_lines) }}
<span class="pure-form-message-inline">Good for websites that just move the content around, and you want to know when NEW content is added, compares new lines against all history for this watch.</span>
</fieldset>
</fieldset>
<fieldset>
<div class="pure-control-group">
{{ render_field(form.trigger_text, rows=5, placeholder="Some text to wait for in a line
/some.regex\d{2}/ for case-INsensitive regex
") }}
") }}
<span class="pure-form-message-inline">
<ul>
<li>Text to wait for before triggering a change/notification, all text and regex are tested <i>case-insensitive</i>.</li>
@@ -324,21 +310,6 @@ nav
</span>
</div>
</fieldset>
<fieldset class="pure-group">
{{ render_field(form.ignore_text, rows=5, placeholder="Some text to ignore in a line
/some.regex\d{2}/ for case-INsensitive regex
") }}
<span class="pure-form-message-inline">
<ul>
<li>Each line processed separately, any line matching will be ignored (removed before creating the checksum)</li>
<li>Regular Expression support, wrap the entire line in forward slash <code>/regex/</code></li>
<li>Changing this will affect the comparison checksum which may trigger an alert</li>
<li>Use the preview/show current tab to see ignores</li>
</ul>
</span>
</fieldset>
<fieldset>
<div class="pure-control-group">
{{ render_field(form.text_should_not_be_present, rows=5, placeholder="For example: Out of stock
@@ -363,7 +334,7 @@ Unavailable") }}
<li>Extracts text in the final output (line by line) after other filters using regular expressions;
<ul>
<li>Regular expression &dash; example <code>/reports.+?2022/i</code></li>
<li>Use <code>//(?aiLmsux))</code> type flags (more <a href="https://docs.python.org/3/library/re.html#index-15">information here</a>)<br></li>
<li>Use <code>//(?aiLmsux))</code> type flags (more <a href="https://docs.python.org/3/library/re.html#index-15">information here</a>)<br/></li>
<li>Keyword example &dash; example <code>Out of stock</code></li>
<li>Use groups to extract just that text &dash; example <code>/reports.+?(\d+)/i</code> returns a list of years only</li>
</ul>
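A hedged sketch of how such line-by-line extraction behaves (the helper is illustrative, not the application's implementation): the pattern runs against each line, case-insensitively when the /i flag is used, and a capture group returns only the grouped text.
import re

def extract_lines(text, pattern, flags=re.IGNORECASE):
    results = []
    for line in text.splitlines():
        m = re.search(pattern, line, flags)
        if m:
            # First capture group if present, otherwise the whole match
            results.append(m.group(1) if m.groups() else m.group(0))
    return results

print(extract_lines("Annual reports for 2021\nAnnual reports for 2022", r'reports.+?(\d+)'))
# -> ['2021', '2022']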
@@ -373,22 +344,8 @@ Unavailable") }}
</span>
</div>
</fieldset>
</div>
</div>
{% endif %}
{% if watch['processor'] == 'restock_diff' %}
<div class="tab-pane-inner" id="restock">
<fieldset>
<div class="pure-control-group">
{{ render_checkbox_field(form.in_stock_only) }}
<span class="pure-form-message-inline">Only trigger notifications when page changes from <strong>out of stock</strong> to <strong>back in stock</strong></span>
</div>
</fieldset>
</div>
{% endif %}
{% if watch['processor'] == 'text_json_diff' %}
<div class="tab-pane-inner visual-selector-ui" id="visualselector">
<img class="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}">
@@ -396,7 +353,7 @@ Unavailable") }}
<div class="pure-control-group">
{% if visualselector_enabled %}
<span class="pure-form-message-inline">
The Visual Selector tool lets you select the <i>text</i> elements that will be used for the change detection &dash; after the <i>Browser Steps</i> has completed.<br><br>
The Visual Selector tool lets you select the <i>text</i> elements that will be used for the change detection &dash; after the <i>Browser Steps</i> has completed.<br/><br/>
</span>
<div id="selector-header">
@@ -421,7 +378,6 @@ Unavailable") }}
</div>
</fieldset>
</div>
{% endif %}
<div id="actions">
<div class="pure-control-group">

View File

@@ -1,6 +1,5 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.jinja' import render_field %}
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
<div class="edit-form monospaced-textarea">
@@ -15,6 +14,7 @@
<form class="pure-form pure-form-aligned" action="{{url_for('import_page')}}" method="POST">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
<div class="tab-pane-inner" id="url-list">
<fieldset class="pure-group">
<legend>
Enter one URL per line, and optionally add tags for each URL after a space, delineated by comma
(,):
@@ -23,7 +23,7 @@
<br>
URLs which do not pass validation will stay in the textarea.
</legend>
{{ render_field(form.processor, class="processor") }}
<textarea name="urls" class="pure-input-1-2" placeholder="https://"
style="width: 100%;
@@ -31,24 +31,22 @@
white-space: pre;
overflow-wrap: normal;
overflow-x: scroll;" rows="25">{{ import_url_list_remaining }}</textarea>
</fieldset>
<div id="quick-watch-processor-type">
</div>
</div>
<div class="tab-pane-inner" id="distill-io">
<fieldset class="pure-group">
<legend>
Copy and Paste your Distill.io watch 'export' file, this should be a JSON file.<br>
Copy and Paste your Distill.io watch 'export' file, this should be a JSON file.</br>
This is <i>experimental</i>, supported fields are <code>name</code>, <code>uri</code>, <code>tags</code>, <code>config:selections</code>, the rest (including <code>schedule</code>) are ignored.
<br>
<br/>
<p>
How to export? <a href="https://distill.io/docs/web-monitor/how-export-and-import-monitors/">https://distill.io/docs/web-monitor/how-export-and-import-monitors/</a><br>
Be sure to set your default fetcher to Chrome if required.<br>
How to export? <a href="https://distill.io/docs/web-monitor/how-export-and-import-monitors/">https://distill.io/docs/web-monitor/how-export-and-import-monitors/</a><br/>
Be sure to set your default fetcher to Chrome if required.</br>
</p>
</legend>
@@ -77,7 +75,7 @@
]
}
" rows="25">{{ original_distill_json }}</textarea>
</fieldset>
</div>
<button type="submit" class="pure-button pure-input-1-2 pure-button-primary">Import</button>
</form>

View File

@@ -54,7 +54,7 @@
<div class="tip">
For now, Differences are performed on text, not graphically, only the latest screenshot is available.
</div>
<br>
</br>
{% if is_html_webdriver %}
{% if screenshot %}
<div class="snapshot-age">{{watch.snapshot_screenshot_ctime|format_timestamp_timeago}}</div>
@@ -67,4 +67,4 @@
{% endif %}
</div>
</div>
{% endblock %}
{% endblock %}

View File

@@ -21,7 +21,6 @@
<li class="tab"><a href="#fetching">Fetching</a></li>
<li class="tab"><a href="#filters">Global Filters</a></li>
<li class="tab"><a href="#api">API</a></li>
<li class="tab"><a href="#proxies">CAPTCHA &amp; Proxies</a></li>
</ul>
</div>
<div class="box-wrap inner">
@@ -40,7 +39,7 @@
<div class="pure-control-group">
{{ render_field(form.application.form.filter_failure_notification_threshold_attempts, class="filter_failure_notification_threshold_attempts") }}
<span class="pure-form-message-inline">After this many consecutive times that the CSS/xPath filter is missing, send a notification
<br>
<br/>
Set to <strong>0</strong> to disable
</span>
</div>
@@ -57,16 +56,11 @@
{% endif %}
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.shared_diff_access, class="shared_diff_access") }}
<span class="pure-form-message-inline">Allow access to view watch diff page when password is enabled (Good for sharing the diff page)
</span>
</div>
<div class="pure-control-group">
{{ render_field(form.application.form.base_url, placeholder="http://yoursite.com:5000/",
class="m-d") }}
<span class="pure-form-message-inline">
Base URL used for the <code>{{ '{{ base_url }}' }}</code> token in notifications and RSS links.<br>Default value is the ENV var 'BASE_URL' (Currently "{{settings_application['current_base_url']}}"),
Base URL used for the <code>{{ '{{ base_url }}' }}</code> token in notifications and RSS links.<br/>Default value is the ENV var 'BASE_URL' (Currently "{{settings_application['current_base_url']}}"),
<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Configurable-BASE_URL-setting">read more here</a>.
</span>
</div>
@@ -105,13 +99,13 @@
<p>Use the <strong>Basic</strong> method (default) where your watched sites don't need Javascript to render.</p>
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'. </p>
</span>
<br>
<br/>
Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using BrightData Proxies, find out more here.</a>
</div>
<fieldset class="pure-group" id="webdriver-override-options">
<div class="pure-form-message-inline">
<strong>If you're having trouble waiting for the page to be fully rendered (text missing etc), try increasing the 'wait' time here.</strong>
<br>
<br/>
This will wait <i>n</i> seconds before extracting the text.
</div>
<div class="pure-control-group">
@@ -124,14 +118,14 @@
<fieldset class="pure-group">
{{ render_checkbox_field(form.application.form.ignore_whitespace) }}
<span class="pure-form-message-inline">Ignore whitespace, tabs and new-lines/line-feeds when considering if a change was detected.<br>
<span class="pure-form-message-inline">Ignore whitespace, tabs and new-lines/line-feeds when considering if a change was detected.<br/>
<i>Note:</i> Changing this will change the status of your existing watches, possibly trigger alerts etc.
</span>
</fieldset>
<fieldset class="pure-group">
{{ render_checkbox_field(form.application.form.render_anchor_tag_content) }}
<span class="pure-form-message-inline">Render anchor tag content, default disabled, when enabled renders links as <code>(link text)[https://somesite.com]</code>
<br>
<br/>
<i>Note:</i> Changing this could affect the content of your existing watches, possibly trigger alerts etc.
</span>
</fieldset>
@@ -151,7 +145,7 @@ nav
{{ render_field(form.application.form.global_ignore_text, rows=5, placeholder="Some text to ignore in a line
/some.regex\d{2}/ for case-INsensitive regex
") }}
<span class="pure-form-message-inline">Note: This is applied globally in addition to the per-watch rules.</span><br>
<span class="pure-form-message-inline">Note: This is applied globally in addition to the per-watch rules.</span><br/>
<span class="pure-form-message-inline">
<ul>
<li>Note: This is applied globally in addition to the per-watch rules.</li>
@@ -170,35 +164,20 @@ nav
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.api_access_token_enabled) }}
<div class="pure-form-message-inline">Restrict API access limit by using <code>x-api-key</code> header</div><br>
<div class="pure-form-message-inline"><br>API Key <span id="api-key">{{api_key}}</span>
<div class="pure-form-message-inline">Restrict API access limit by using <code>x-api-key</code> header</div><br/>
<div class="pure-form-message-inline"><br/>API Key <span id="api-key">{{api_key}}</span>
<span style="display:none;" id="api-key-copy" >copy</span>
</div>
</div>
</div>
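To make the x-api-key restriction above concrete, here is a hedged sketch of a client call; the base URL, port, key value and the /api/v1/watch path are placeholders or assumptions rather than values taken from this change.

import requests

# Placeholder values only - substitute your own instance address and the API key shown above.
BASE_URL = "http://localhost:5000"   # assumed default address/port
API_KEY = "your-api-key-here"

# With the access token enabled, requests missing the x-api-key header are rejected.
resp = requests.get(f"{BASE_URL}/api/v1/watch", headers={"x-api-key": API_KEY})
resp.raise_for_status()
for uuid, watch in resp.json().items():
    print(uuid, watch.get("title") or watch.get("url"))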
<div class="tab-pane-inner" id="proxies">
<p><strong>Tip</strong>: You can connect to websites using <a href="https://brightdata.grsm.io/n0r16zf7eivq">BrightData</a> proxies, their service <strong>WebUnlocker</strong> will solve most CAPTCHAs, whilst their <strong>Residential Proxies</strong> may help to avoid CAPTCHA altogether. </p>
<p>It may be easier to try <strong>WebUnlocker</strong> first; it also supports country selection.</p>
<p>
Once you have <a href="https://brightdata.grsm.io/n0r16zf7eivq">registered</a> and enabled the required services, visit the <a href="https://brightdata.com/cp/api_example?">API example page</a>, select <strong>Python</strong>, set the country you wish to use, then copy and paste the example URL below<br>
The Proxy URL with BrightData should start with <code>http://brd-customer...</code>
</p>
<p>When you sign up using <a href="https://brightdata.grsm.io/n0r16zf7eivq">https://brightdata.grsm.io/n0r16zf7eivq</a>, BrightData will match your first deposit up to $150</p>
<div class="pure-control-group">
{{ render_field(form.requests.form.extra_proxies) }}
<span class="pure-form-message-inline">"Name" will be used for selecting the proxy in the Watch Edit settings</span>
</div>
</div>
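As a rough illustration of the Name/URL pair the extra proxies field above expects, a hedged sketch follows; every credential and host in it is a placeholder (as noted above, a real BrightData URL simply starts with http://brd-customer...).

# Placeholder values only - the host, zone, customer id and password below are not real.
# "proxy_name" is what appears as "Name" in the Watch Edit proxy selector.
extra_proxy = {
    "proxy_name": "brightdata-webunlocker",
    "proxy_url": "http://brd-customer-<CUSTOMER_ID>-zone-<ZONE>:<PASSWORD>@<BRIGHTDATA_HOST>:<PORT>",
}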
<div id="actions">
<div class="pure-control-group">
{{ render_button(form.save_button) }}
<a href="{{url_for('index')}}" class="pure-button button-small button-cancel">Back</a>
<a href="{{url_for('clear_all_history')}}" class="pure-button button-small button-cancel">Clear Snapshot History</a>
</div>
</div>
</form>
</div>

View File

@@ -21,10 +21,6 @@
{{ render_simple_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }}
</div>
</div>
<div id="quick-watch-processor-type">
{{ render_simple_field(form.processor, title="Edit first then Watch") }}
</div>
</fieldset>
<span style="color:#eee; font-size: 80%;"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread-white.svg')}}" /> Tip: You can also add 'shared' watches. <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Sharing-a-Watch">More info</a></a></span>
</form>
@@ -32,12 +28,11 @@
<form class="pure-form" action="{{ url_for('form_watch_list_checkbox_operations') }}" method="POST" id="watch-list-form">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
<div id="checkbox-operations">
<button class="pure-button button-secondary button-xsmall" name="op" value="pause">Pause</button>
<button class="pure-button button-secondary button-xsmall" name="op" value="unpause">UnPause</button>
<button class="pure-button button-secondary button-xsmall" name="op" value="mute">Mute</button>
<button class="pure-button button-secondary button-xsmall" name="op" value="unmute">UnMute</button>
<button class="pure-button button-secondary button-xsmall" name="op" value="recheck">Recheck</button>
<button class="pure-button button-secondary button-xsmall" name="op" value="notification-default">Use default notification</button>
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="pause">Pause</button>
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="unpause">UnPause</button>
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="mute">Mute</button>
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="unmute">UnMute</button>
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="notification-default">Use default notification</button>
<button class="pure-button button-secondary button-xsmall" style="background: #dd4242; font-size: 70%" name="op" value="delete">Delete</button>
</div>
<div>
@@ -61,9 +56,9 @@
<th></th>
{% set link_order = "desc" if sort_order else "asc" %}
{% set arrow_span = "" %}
<th><a class="{{ 'active '+link_order if sort_attribute == 'label' else 'inactive' }}" href="{{url_for('index', sort='label', order=link_order, tag=active_tag)}}">Website <span class='arrow {{link_order}}'></span></a></th>
<th><a class="{{ 'active '+link_order if sort_attribute == 'last_checked' else 'inactive' }}" href="{{url_for('index', sort='last_checked', order=link_order, tag=active_tag)}}">Last Checked <span class='arrow {{link_order}}'></span></a></th>
<th><a class="{{ 'active '+link_order if sort_attribute == 'last_changed' else 'inactive' }}" href="{{url_for('index', sort='last_changed', order=link_order, tag=active_tag)}}">Last Changed <span class='arrow {{link_order}}'></span></a></th>
<th><a class="{{ 'active '+link_order if sort_attribute == 'label' else 'inactive' }}" href="{{url_for('index', sort='label', order=link_order)}}">Website <span class='arrow {{link_order}}'></span></a></th>
<th><a class="{{ 'active '+link_order if sort_attribute == 'last_checked' else 'inactive' }}" href="{{url_for('index', sort='last_checked', order=link_order)}}">Last Checked <span class='arrow {{link_order}}'></span></a></th>
<th><a class="{{ 'active '+link_order if sort_attribute == 'last_changed' else 'inactive' }}" href="{{url_for('index', sort='last_changed', order=link_order)}}">Last Changed <span class='arrow {{link_order}}'></span></a></th>
<th></th>
</tr>
</thead>
@@ -76,7 +71,7 @@
{% if not ( loop.index >= 3 and loop.index <=4) %}{% continue %}{% endif %} -->
#}
<tr id="{{ watch.uuid }}"
class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }} processor-{{ watch['processor'] }}
class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }}
{% if watch.last_error is defined and watch.last_error != False %}error{% endif %}
{% if watch.last_notification_error is defined and watch.last_notification_error != False %}error{% endif %}
{% if watch.paused is defined and watch.paused != False %}paused{% endif %}
@@ -93,50 +88,22 @@
</td>
<td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
<a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}"></a>
<a class="link-spread" href="{{url_for('form_share_put_watch', uuid=watch.uuid)}}"><img class="status-icon" src="{{url_for('static_content', group='images', filename='spread.svg')}}" class="status-icon icon icon-spread" title="Create a link to share watch config with others" /></a>
<a class="link-spread" href="{{url_for('form_share_put_watch', uuid=watch.uuid)}}"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" class="icon icon-spread" title="Create a link to share watch config with others" /></a>
{% if watch.get_fetch_backend == "html_webdriver"
or ( watch.get_fetch_backend == "system" and system_default_fetcher == 'html_webdriver' )
%}
<img class="status-icon" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" title="Using a chrome browser" />
{% endif %}
{%if watch.fetch_backend == "html_webdriver" %}<img style="height: 1em; display:inline-block;" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" title="Using a chrome browser" />{% endif %}
{%if watch.is_pdf %}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" title="Converting PDF to text" />{% endif %}
{% if watch.last_error is defined and watch.last_error != False %}
<div class="fetch-error">{{ watch.last_error }}
{% if '403' in watch.last_error %}
{% if has_proxies %}
<a href="{{ url_for('settings_page', uuid=watch.uuid) }}#proxies">Try other proxies/location</a>&nbsp;
{% endif %}
<a href="{{ url_for('settings_page', uuid=watch.uuid) }}#proxies">Try adding external proxies/locations</a>
{% endif %}
</div>
<div class="fetch-error">{{ watch.last_error }}</div>
{% endif %}
{% if watch.last_notification_error is defined and watch.last_notification_error != False %}
<div class="fetch-error notification-error"><a href="{{url_for('notification_logs')}}">{{ watch.last_notification_error }}</a></div>
{% endif %}
{% if watch['processor'] == 'text_json_diff' %}
{% if watch['has_ldjson_price_data'] and not watch['track_ldjson_price_data'] %}
<div class="ldjson-price-track-offer">Embedded price data detected, follow only price data? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
{% endif %}
{% if watch['track_ldjson_price_data'] == 'accepted' %}
<span class="tracking-ldjson-price-data" title="Automatically following embedded price information"><img src="{{url_for('static_content', group='images', filename='price-tag-icon.svg')}}" class="status-icon price-follow-tag-icon"/> Price</span>
{% endif %}
{% if watch['has_ldjson_price_data'] and not watch['track_ldjson_price_data'] %}
<div class="ldjson-price-track-offer">Embedded price data detected, follow only price data? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
{% endif %}
{% if watch['processor'] == 'restock_diff' %}
<span class="restock-label {{'in-stock' if watch['in_stock'] else 'not-in-stock' }}" title="detecting restock conditions">
<!-- maybe some object watch['processor'][restock_diff] or.. -->
{% if watch['last_checked'] %}
{% if watch['in_stock'] %} In stock {% else %} Not in stock {% endif %}
{% else %}
Not yet checked
{% endif %}
{% if watch['track_ldjson_price_data'] == 'accepted' %}
<span class="tracking-ldjson-price-data" title="Automatically following embedded price information"><img src="{{url_for('static_content', group='images', filename='price-tag-icon.svg')}}" class="price-follow-tag-icon"/> Price</span>
{% endif %}
{% if not active_tag %}
<span class="watch-tag-list">{{ watch.tag}}</span>
{% endif %}

View File

@@ -1,10 +1,10 @@
{
"proxy-one": {
"label": "Proxy One",
"url": "http://squid-one:3128"
"label": "One",
"url": "http://127.0.0.1:3128"
},
"proxy-two": {
"label": "Proxy Two",
"url": "http://squid-two:3128"
"label": "two",
"url": "http://127.0.0.1:3129"
}
}

View File

@@ -1,48 +0,0 @@
acl localnet src 0.0.0.1-0.255.255.255 # RFC 1122 "this" network (LAN)
acl localnet src 10.0.0.0/8 # RFC 1918 local private network (LAN)
acl localnet src 100.64.0.0/10 # RFC 6598 shared address space (CGN)
acl localnet src 169.254.0.0/16 # RFC 3927 link-local (directly plugged) machines
acl localnet src 172.16.0.0/12 # RFC 1918 local private network (LAN)
acl localnet src 192.168.0.0/16 # RFC 1918 local private network (LAN)
acl localnet src fc00::/7 # RFC 4193 local private network range
acl localnet src fe80::/10 # RFC 4291 link-local (directly plugged) machines
acl localnet src 159.65.224.174
acl SSL_ports port 443
acl Safe_ports port 80 # http
acl Safe_ports port 21 # ftp
acl Safe_ports port 443 # https
acl Safe_ports port 70 # gopher
acl Safe_ports port 210 # wais
acl Safe_ports port 1025-65535 # unregistered ports
acl Safe_ports port 280 # http-mgmt
acl Safe_ports port 488 # gss-http
acl Safe_ports port 591 # filemaker
acl Safe_ports port 777 # multiling http
acl CONNECT method CONNECT
http_access deny !Safe_ports
http_access deny CONNECT !SSL_ports
#http_access allow localhost manager
http_access deny manager
#http_access allow localhost
#http_access allow localnet
auth_param basic program /usr/lib/squid3/basic_ncsa_auth /etc/squid3/passwords
auth_param basic realm proxy
acl authenticated proxy_auth REQUIRED
http_access allow authenticated
http_access deny all
http_port 3128
coredump_dir /var/spool/squid
refresh_pattern ^ftp: 1440 20% 10080
refresh_pattern ^gopher: 1440 0% 1440
refresh_pattern -i (/cgi-bin/|\?) 0 0% 0
refresh_pattern \/(Packages|Sources)(|\.bz2|\.gz|\.xz)$ 0 0% 0 refresh-ims
refresh_pattern \/Release(|\.gpg)$ 0 0% 0 refresh-ims
refresh_pattern \/InRelease$ 0 0% 0 refresh-ims
refresh_pattern \/(Translation-.*)(|\.bz2|\.gz|\.xz)$ 0 0% 0 refresh-ims
refresh_pattern . 0 20% 4320
logfile_rotate 0

View File

@@ -1 +0,0 @@
test:$apr1$xvhFolTA$E/kz5/Rw1ewcyaSUdwqZs.

View File

@@ -1,50 +0,0 @@
#!/usr/bin/python3
import time
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks
# just make a request, we will grep in the docker logs to see it actually got called
def test_select_custom(client, live_server):
live_server_setup(live_server)
# Go to settings and add our custom proxy
res = client.post(
url_for("settings_page"),
data={
"requests-time_between_check-minutes": 180,
"application-ignore_whitespace": "y",
"application-fetch_backend": "html_requests",
"requests-extra_proxies-0-proxy_name": "custom-test-proxy",
# test:awesome is set in tests/proxy_list/squid-passwords.txt
"requests-extra_proxies-0-proxy_url": "http://test:awesome@squid-custom:3128",
},
follow_redirects=True
)
assert b"Settings updated." in res.data
res = client.post(
url_for("import_page"),
# Because a URL won't show in the squid/proxy logs when it is fetched over SSL
# Use plain HTTP or a specific domain-name here
data={"urls": "https://changedetection.io/CHANGELOG.txt"},
follow_redirects=True
)
assert b"1 Imported" in res.data
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'Proxy Authentication Required' not in res.data
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True
)
# We should see something via proxy
assert b'<div class=""> - 0.' in res.data
#
# Now we should see the request in the container logs for "squid-squid-custom" because it will be the only default

View File

@@ -1,2 +0,0 @@
"""Tests for the app."""

View File

@@ -1,3 +0,0 @@
#!/usr/bin/python3
from .. import conftest

View File

@@ -1,106 +0,0 @@
#!/usr/bin/python3
import os
import time
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
from changedetectionio.notification import (
default_notification_body,
default_notification_format,
default_notification_title,
valid_notification_formats,
)
def set_original_response():
test_return_data = """<html>
<body>
Some initial text<br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
<div>price: $10.99</div>
<div id="sametext">Out of stock</div>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
return None
def set_back_in_stock_response():
test_return_data = """<html>
<body>
Some initial text<br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
<div>price: $10.99</div>
<div id="sametext">Available!</div>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
return None
# Add a site in paused mode, add an invalid filter, we should still have visual selector data ready
def test_restock_detection(client, live_server):
set_original_response()
#assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"
time.sleep(1)
live_server_setup(live_server)
#####################
notification_url = url_for('test_notification_endpoint', _external=True).replace('http://localhost', 'http://changedet').replace('http', 'json')
#####################
# Set this up for when we remove the notification from the watch, it should fallback with these details
res = client.post(
url_for("settings_page"),
data={"application-notification_urls": notification_url,
"application-notification_title": "fallback-title "+default_notification_title,
"application-notification_body": "fallback-body "+default_notification_body,
"application-notification_format": default_notification_format,
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_webdriver"},
follow_redirects=True
)
# Add our URL to the import page, because the docker container (playwright/selenium) won't be able to connect to our usual test URL
test_url = url_for('test_endpoint', _external=True).replace('http://localhost', 'http://changedet')
client.post(
url_for("form_quick_watch_add"),
data={"url": test_url, "tag": '', 'processor': 'restock_diff'},
follow_redirects=True
)
# Is it correctly shown as NOT in stock?
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'not-in-stock' in res.data
# Is it correctly shown as in stock
set_back_in_stock_response()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'not-in-stock' not in res.data
# We should have a notification
time.sleep(2)
assert os.path.isfile("test-datastore/notification.txt")
os.unlink("test-datastore/notification.txt")
# Default behaviour is to only fire notification when it goes OUT OF STOCK -> IN STOCK
# So here there should be no file, because we go IN STOCK -> OUT OF STOCK
set_original_response()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
assert not os.path.isfile("test-datastore/notification.txt")

Binary file not shown.

View File

@@ -1,34 +1,18 @@
from . util import live_server_setup, extract_UUID_from_client
from flask import url_for
import time
from . util import live_server_setup
def test_check_access_control(app, client, live_server):
def test_check_access_control(app, client):
# Still doesn't work, but this is closer.
live_server_setup(live_server)
with app.test_client(use_cookies=True) as c:
# Check we don't have any password protection enabled yet.
res = c.get(url_for("settings_page"))
assert b"Remove password" not in res.data
# add something that we can hit via diff page later
res = c.post(
url_for("import_page"),
data={"urls": url_for('test_random_content_endpoint', _external=True)},
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(2)
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
assert b'1 watches queued for rechecking.' in res.data
time.sleep(2)
# Enable password check and diff page access bypass
# Enable password check.
res = c.post(
url_for("settings_page"),
data={"application-password": "foobar",
"application-shared_diff_access": "True",
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_requests"},
follow_redirects=True
@@ -38,15 +22,9 @@ def test_check_access_control(app, client, live_server):
# Check we hit the login
res = c.get(url_for("index"), follow_redirects=True)
# Should be logged out
assert b"Login" in res.data
# The diff page should return something valid when logged out
res = client.get(url_for("diff_history_page", uuid="first"))
assert b'Random content' in res.data
# Menu should not be available yet
# assert b"SETTINGS" not in res.data
# assert b"BACKUP" not in res.data
@@ -131,25 +109,3 @@ def test_check_access_control(app, client, live_server):
assert b"Password protection enabled" not in res.data
# Now checking the diff access
# Enable password check and diff page access bypass
res = c.post(
url_for("settings_page"),
data={"application-password": "foobar",
# Should be disabled
# "application-shared_diff_access": "True",
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Password protection enabled." in res.data
# Check we hit the login
res = c.get(url_for("index"), follow_redirects=True)
# Should be logged out
assert b"Login" in res.data
# The diff page should return something valid when logged out
res = client.get(url_for("diff_history_page", uuid="first"))
assert b'Random content' not in res.data

View File

@@ -1,176 +0,0 @@
#!/usr/bin/python3
import time
from flask import url_for
from .util import live_server_setup
from changedetectionio import html_tools
def set_original(excluding=None, add_line=None):
test_return_data = """<html>
<body>
<p>Some initial text</p>
<p>So let's see what happens.</p>
<p>and a new line!</p>
<p>The golden line</p>
<p>A BREAK TO MAKE THE TOP LINE STAY AS "REMOVED" OR IT WILL GET COUNTED AS "CHANGED INTO"</p>
<p>Something irrelevant</p>
</body>
</html>
"""
if add_line:
c=test_return_data.splitlines()
c.insert(5, add_line)
test_return_data = "\n".join(c)
if excluding:
output = ""
for i in test_return_data.splitlines():
if not excluding in i:
output += f"{i}\n"
test_return_data = output
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
def test_setup(client, live_server):
live_server_setup(live_server)
def test_check_removed_line_contains_trigger(client, live_server):
sleep_time_for_fetch_thread = 3
# Give the endpoint time to spin up
time.sleep(1)
set_original()
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# Go to the edit page and add our trigger text
res = client.post(
url_for("edit_page", uuid="first"),
data={"trigger_text": 'The golden line',
"url": test_url,
'fetch_backend': "html_requests",
'filter_text_removed': 'y'},
follow_redirects=True
)
assert b"Updated watch." in res.data
time.sleep(sleep_time_for_fetch_thread)
set_original(excluding='Something irrelevant')
# A line that's not the trigger should not trigger anything
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
assert b'1 watches queued for rechecking.' in res.data
time.sleep(sleep_time_for_fetch_thread)
res = client.get(url_for("index"))
assert b'unviewed' not in res.data
# The trigger line is REMOVED, this should trigger
set_original(excluding='The golden line')
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(sleep_time_for_fetch_thread)
res = client.get(url_for("index"))
assert b'unviewed' in res.data
# Now add it back, and we should not get a trigger
client.get(url_for("mark_all_viewed"), follow_redirects=True)
set_original(excluding=None)
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(sleep_time_for_fetch_thread)
res = client.get(url_for("index"))
assert b'unviewed' not in res.data
# Remove it again, and we should get a trigger
set_original(excluding='The golden line')
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(sleep_time_for_fetch_thread)
res = client.get(url_for("index"))
assert b'unviewed' in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_check_add_line_contains_trigger(client, live_server):
sleep_time_for_fetch_thread = 3
# Give the endpoint time to spin up
time.sleep(1)
test_notification_url = url_for('test_notification_endpoint', _external=True).replace('http://', 'post://') + "?xxx={{ watch_url }}"
res = client.post(
url_for("settings_page"),
data={"application-notification_title": "New ChangeDetection.io Notification - {{ watch_url }}",
"application-notification_body": 'triggered text was -{{triggered_text}}-',
# https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
"application-notification_urls": test_notification_url,
"application-minutes_between_check": 180,
"application-fetch_backend": "html_requests"
},
follow_redirects=True
)
assert b'Settings updated' in res.data
set_original()
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# Go to the edit page and add our trigger text
res = client.post(
url_for("edit_page", uuid="first"),
data={"trigger_text": 'Oh yes please',
"url": test_url,
'fetch_backend': "html_requests",
'filter_text_removed': '',
'filter_text_added': 'y'},
follow_redirects=True
)
assert b"Updated watch." in res.data
time.sleep(sleep_time_for_fetch_thread)
set_original(excluding='Something irrelevant')
# A line that's not the trigger should not trigger anything
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
assert b'1 watches queued for rechecking.' in res.data
time.sleep(sleep_time_for_fetch_thread)
res = client.get(url_for("index"))
assert b'unviewed' not in res.data
# The trigger line is ADDED, this should trigger
set_original(add_line='<p>Oh yes please</p>')
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(sleep_time_for_fetch_thread)
res = client.get(url_for("index"))
assert b'unviewed' in res.data
with open("test-datastore/notification.txt", 'r') as f:
response= f.read()
assert '-Oh yes please-' in response
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data

View File

@@ -11,10 +11,10 @@ import uuid
def set_original_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div id="sametext">Some text thats the same</div>
<div id="changetext">Some text that will change</div>
</body>
@@ -29,10 +29,10 @@ def set_original_response():
def set_modified_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>which has this one new line</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div id="sametext">Some text thats the same</div>
<div id="changetext">Some text that changes</div>
</body>
@@ -53,15 +53,14 @@ def is_valid_uuid(val):
return False
def test_setup(client, live_server):
live_server_setup(live_server)
def test_api_simple(client, live_server):
live_server_setup(live_server)
api_key = extract_api_key_from_UI(client)
# Create a watch
set_original_response()
watch_uuid = None
# Validate bad URL
test_url = url_for('test_endpoint', _external=True,
@@ -81,34 +80,25 @@ def test_api_simple(client, live_server):
headers={'content-type': 'application/json', 'x-api-key': api_key},
follow_redirects=True
)
assert is_valid_uuid(res.json.get('uuid'))
watch_uuid = res.json.get('uuid')
s = json.loads(res.data)
assert is_valid_uuid(s['uuid'])
watch_uuid = s['uuid']
assert res.status_code == 201
time.sleep(3)
# Verify its in the list and that recheck worked
res = client.get(
url_for("createwatch", tag="OnE"),
url_for("createwatch"),
headers={'x-api-key': api_key}
)
assert watch_uuid in res.json.keys()
before_recheck_info = res.json[watch_uuid]
assert watch_uuid in json.loads(res.data).keys()
before_recheck_info = json.loads(res.data)[watch_uuid]
assert before_recheck_info['last_checked'] != 0
#705 `last_changed` should be zero on the first check
assert before_recheck_info['last_changed'] == 0
assert before_recheck_info['title'] == 'My test URL'
# Check that limiting by tag doesn't return anything when nothing is found
res = client.get(
url_for("createwatch", tag="Something else entirely"),
headers={'x-api-key': api_key}
)
assert len(res.json) == 0
time.sleep(2)
set_modified_response()
# Trigger recheck of all ?recheck_all=1
client.get(
@@ -122,7 +112,7 @@ def test_api_simple(client, live_server):
url_for("createwatch"),
headers={'x-api-key': api_key},
)
after_recheck_info = res.json[watch_uuid]
after_recheck_info = json.loads(res.data)[watch_uuid]
assert after_recheck_info['last_checked'] != before_recheck_info['last_checked']
assert after_recheck_info['last_changed'] != 0
@@ -131,11 +121,12 @@ def test_api_simple(client, live_server):
url_for("watchhistory", uuid=watch_uuid),
headers={'x-api-key': api_key},
)
assert len(res.json) == 2, "Should have two history entries (the original and the changed)"
history = json.loads(res.data)
assert len(history) == 2, "Should have two history entries (the original and the changed)"
# Fetch a snapshot by timestamp, check the right one was found
res = client.get(
url_for("watchsinglehistory", uuid=watch_uuid, timestamp=list(res.json.keys())[-1]),
url_for("watchsinglehistory", uuid=watch_uuid, timestamp=list(history.keys())[-1]),
headers={'x-api-key': api_key},
)
assert b'which has this one new line' in res.data
@@ -152,7 +143,7 @@ def test_api_simple(client, live_server):
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key}
)
watch = res.json
watch = json.loads(res.data)
# @todo how to handle None/default global values?
assert watch['history_n'] == 2, "Found replacement history section, which is in its own API"
@@ -161,46 +152,10 @@ def test_api_simple(client, live_server):
url_for("systeminfo"),
headers={'x-api-key': api_key},
)
assert res.json.get('watch_count') == 1
assert res.json.get('uptime') > 0.5
info = json.loads(res.data)
assert info.get('watch_count') == 1
assert info.get('uptime') > 0.5
######################################################
# Mute and Pause, check it worked
res = client.get(
url_for("watch", uuid=watch_uuid, paused='paused'),
headers={'x-api-key': api_key}
)
assert b'OK' in res.data
res = client.get(
url_for("watch", uuid=watch_uuid, muted='muted'),
headers={'x-api-key': api_key}
)
assert b'OK' in res.data
res = client.get(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key}
)
assert res.json.get('paused') == True
assert res.json.get('notification_muted') == True
# Now unpause, unmute
res = client.get(
url_for("watch", uuid=watch_uuid, muted='unmuted'),
headers={'x-api-key': api_key}
)
assert b'OK' in res.data
res = client.get(
url_for("watch", uuid=watch_uuid, paused='unpaused'),
headers={'x-api-key': api_key}
)
assert b'OK' in res.data
res = client.get(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key}
)
assert res.json.get('paused') == 0
assert res.json.get('notification_muted') == 0
######################################################
# Finally delete the watch
res = client.delete(
@@ -214,7 +169,9 @@ def test_api_simple(client, live_server):
url_for("createwatch"),
headers={'x-api-key': api_key}
)
assert len(res.json) == 0, "Watch list should be empty"
watch_list = json.loads(res.data)
assert len(watch_list) == 0, "Watch list should be empty"
def test_access_denied(client, live_server):
# `config_api_token_enabled` Should be On by default
@@ -246,97 +203,3 @@ def test_access_denied(client, live_server):
url_for("createwatch")
)
assert res.status_code == 200
# Cleanup everything
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
res = client.post(
url_for("settings_page"),
data={
"requests-time_between_check-minutes": 180,
"application-fetch_backend": "html_requests",
"application-api_access_token_enabled": "y"
},
follow_redirects=True
)
assert b"Settings updated." in res.data
def test_api_watch_PUT_update(client, live_server):
#live_server_setup(live_server)
api_key = extract_api_key_from_UI(client)
time.sleep(1)
# Create a watch
set_original_response()
test_url = url_for('test_endpoint', _external=True,
headers={'x-api-key': api_key}, )
# Create new
res = client.post(
url_for("createwatch"),
data=json.dumps({"url": test_url, 'tag': "One, Two", "title": "My test URL", 'headers': {'cookie': 'yum'} }),
headers={'content-type': 'application/json', 'x-api-key': api_key},
follow_redirects=True
)
assert res.status_code == 201
time.sleep(1)
# Get a listing, it will be the first one
res = client.get(
url_for("createwatch"),
headers={'x-api-key': api_key}
)
watch_uuid = list(res.json.keys())[0]
# Check in the edit page just to be sure
res = client.get(
url_for("edit_page", uuid=watch_uuid),
)
assert b"cookie: yum" in res.data, "'cookie: yum' found in 'headers' section"
# HTTP PUT ( UPDATE an existing watch )
res = client.put(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key, 'content-type': 'application/json'},
data=json.dumps({"title": "new title", 'time_between_check': {'minutes': 552}, 'headers': {'cookie': 'all eaten'}}),
)
assert res.status_code == 200, "HTTP PUT update was sent OK"
# HTTP GET single watch, title should be updated
res = client.get(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key}
)
assert res.json.get('title') == 'new title'
# Check in the edit page just to be sure
res = client.get(
url_for("edit_page", uuid=watch_uuid),
)
assert b"new title" in res.data, "new title found in edit page"
assert b"552" in res.data, "552 minutes found in edit page"
assert b"One, Two" in res.data, "Tag 'One, Two' was found"
assert b"cookie: all eaten" in res.data, "'cookie: all eaten' found in 'headers' section"
######################################################
# HTTP PUT try a field that doesn't exist
# HTTP PUT an update
res = client.put(
url_for("watch", uuid=watch_uuid),
headers={'x-api-key': api_key, 'content-type': 'application/json'},
data=json.dumps({"title": "new title", "some other field": "uh oh"}),
)
assert res.status_code == 400, "Should get error 400 when we give a field that doesnt exist"
# Message will come from `flask_expects_json`
assert b'Additional properties are not allowed' in res.data
# Cleanup everything
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data

View File

@@ -7,10 +7,10 @@ from .util import live_server_setup, extract_UUID_from_client, extract_api_key_f
def set_response_with_ldjson():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div class="sametext">Some text thats the same</div>
<div class="changetext">Some text that will change</div>
<script type="application/ld+json">
@@ -61,10 +61,10 @@ def set_response_with_ldjson():
def set_response_without_ldjson():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div class="sametext">Some text thats the same</div>
<div class="changetext">Some text that will change</div>
</body>
@@ -143,4 +143,4 @@ def test_check_ldjson_price_autodetect(client, live_server):
assert b'ldjson-price-track-offer' not in res.data
##########################################################################################
client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
client.get(url_for("form_delete", uuid="all"), follow_redirects=True)

View File

@@ -3,7 +3,7 @@
import time
from flask import url_for
from urllib.request import urlopen
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks
sleep_time_for_fetch_thread = 3
@@ -11,7 +11,7 @@ sleep_time_for_fetch_thread = 3
# Basic test to check inscriptus is not adding return line chars, basically works etc
def test_inscriptus():
from inscriptis import get_text
html_content = "<html><body>test!<br>ok man</body></html>"
html_content = "<html><body>test!<br/>ok man</body></html>"
stripped_text_from_html = get_text(html_content)
assert stripped_text_from_html == 'test!\nok man'
@@ -67,7 +67,7 @@ def test_check_basic_change_detection_functionality(client, live_server):
# Force recheck
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
assert b'1 watches queued for rechecking.' in res.data
assert b'1 watches are queued for rechecking.' in res.data
wait_for_all_checks(client)
@@ -76,13 +76,12 @@ def test_check_basic_change_detection_functionality(client, live_server):
assert b'unviewed' in res.data
# #75, and it should be in the RSS feed
rss_token = extract_rss_token_from_UI(client)
res = client.get(url_for("rss", token=rss_token, _external=True))
res = client.get(url_for("rss"))
expected_url = url_for('test_endpoint', _external=True)
assert b'<rss' in res.data
# re #16 should have the diff in here too
assert b'(into) which has this one new line' in res.data
assert b'(into ) which has this one new line' in res.data
assert b'CDATA' in res.data
assert expected_url.encode('utf-8') in res.data

View File

@@ -8,10 +8,10 @@ from changedetectionio import html_tools
def set_original_ignore_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>
@@ -24,10 +24,10 @@ def set_original_ignore_response():
def set_modified_original_ignore_response():
test_return_data = """<html>
<body>
Some NEW nice initial text<br>
Some NEW nice initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<p>new ignore stuff</p>
<p>out of stock</p>
<p>blah</p>
@@ -44,11 +44,11 @@ def set_modified_original_ignore_response():
def set_modified_response_minus_block_text():
test_return_data = """<html>
<body>
Some NEW nice initial text<br>
Some NEW nice initial text</br>
<p>Which is across multiple lines</p>
<p>now on sale $2/p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<p>new ignore stuff</p>
<p>blah</p>
</body>
@@ -87,10 +87,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
# Add our URL to the import page
res = client.post(
url_for("edit_page", uuid="first"),
data={"text_should_not_be_present": ignore_text,
"url": test_url,
'fetch_backend': "html_requests"
},
data={"text_should_not_be_present": ignore_text, "url": test_url, 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -132,6 +129,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
set_modified_response_minus_block_text()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(sleep_time_for_fetch_thread)
res = client.get(url_for("index"))
assert b'unviewed' in res.data

View File

@@ -12,10 +12,10 @@ def test_setup(live_server):
def set_original_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div id="sametext">Some text thats the same</div>
<div id="changetext">Some text that will change</div>
</body>
@@ -29,10 +29,10 @@ def set_original_response():
def set_modified_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>which has this one new line</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div id="sametext">Some text thats the same</div>
<div id="changetext">Some text that changes</div>
</body>

View File

@@ -25,10 +25,10 @@ def set_original_response():
</ul>
</nav>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div id="changetext">Some text that will change</div>
</body>
<footer>
@@ -54,10 +54,10 @@ def set_modified_response():
</ul>
</nav>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div id="changetext">Some text that changes</div>
</body>
<footer>
@@ -71,6 +71,7 @@ def set_modified_response():
def test_element_removal_output():
from changedetectionio import fetch_site_status
from inscriptis import get_text
# Check text with sub-parts renders correctly
@@ -84,7 +85,7 @@ def test_element_removal_output():
</ul>
</nav>
<body>
Some initial text<br>
Some initial text</br>
<p>across multiple lines</p>
<div id="changetext">Some text that changes</div>
</body>

View File

@@ -10,10 +10,10 @@ from ..html_tools import *
def set_original_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div id="sametext">Some text thats the same</div>
<div class="changetext">Some text that will change</div>
</body>
@@ -28,12 +28,12 @@ def set_original_response():
def set_modified_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>which has this one new line</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div id="sametext">Some text thats the same</div>
<div class="changetext">Some text that did change ( 1000 online <br> 80 guests<br> 2000 online )</div>
<div class="changetext">Some text that did change ( 1000 online <br/> 80 guests<br/> 2000 online )</div>
<div class="changetext">SomeCase insensitive 3456</div>
</body>
</html>
@@ -49,8 +49,8 @@ def set_multiline_response():
test_return_data = """<html>
<body>
<p>Something <br>
across 6 billion multiple<br>
<p>Something <br/>
across 6 billion multiple<br/>
lines
</p>

View File

@@ -11,10 +11,10 @@ from changedetectionio.model import App
def set_response_without_filter():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div id="nope-doesnt-exist">Some text thats the same</div>
</body>
</html>
@@ -28,10 +28,10 @@ def set_response_without_filter():
def set_response_with_filter():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div class="ticket-available">Ticket now on sale!</div>
</body>
</html>
@@ -117,3 +117,18 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se
assert 'Ticket now on sale' in notification
os.unlink("test-datastore/notification.txt")
# Test that if it gets removed, then re-added, we get a notification
# Remove the target and re-add it, we should get a new notification
set_response_without_filter()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(3)
assert not os.path.isfile("test-datastore/notification.txt")
set_response_with_filter()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(3)
assert os.path.isfile("test-datastore/notification.txt")
# Also test that the filter was updated after the first one was requested

View File

@@ -1,17 +1,18 @@
import os
import time
import re
from flask import url_for
from .util import set_original_response, live_server_setup, extract_UUID_from_client
from .util import set_original_response, live_server_setup
from changedetectionio.model import App
def set_response_with_filter():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div id="nope-doesnt-exist">Some text thats the same</div>
</body>
</html>
@@ -120,10 +121,6 @@ def run_filter_test(client, content_filter):
notification = f.read()
assert not 'CSS/xPath filter was not present in the page' in notification
# Re #1247 - All tokens got replaced
uuid = extract_UUID_from_client(client)
assert uuid in notification
# cleanup for the next
client.get(
url_for("form_delete", uuid="all"),
@@ -145,4 +142,4 @@ def test_check_xpath_filter_failure_notification(client, live_server):
time.sleep(1)
run_filter_test(client, '//*[@id="nope-doesnt-exist"]')
# Test that notification is never sent
# Test that notification is never sent

View File

@@ -6,11 +6,11 @@ from ..html_tools import html_to_text
def test_html_to_text_func():
test_html = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<a href="/first_link"> More Text </a>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<a href="second_link.com"> Even More Text </a>
</body>
</html>
@@ -21,7 +21,7 @@ def test_html_to_text_func():
no_links_text = \
"Some initial text\n\nWhich is across multiple " \
"lines\n\nMore Text\nSo let's see what happens.\nEven More Text"
"lines\n\nMore Text So let's see what happens. Even More Text"
# check that no links are in the extracted text
assert text_content == no_links_text
@@ -31,7 +31,7 @@ def test_html_to_text_func():
links_text = \
"Some initial text\n\nWhich is across multiple lines\n\n[ More Text " \
"](/first_link)\nSo let's see what happens.\n[ Even More Text ]" \
"](/first_link) So let's see what happens. [ Even More Text ]" \
"(second_link.com)"
# check that links are present in the extracted text

View File

@@ -1,5 +1,7 @@
#!/usr/bin/python3
import time
from flask import url_for
from . util import live_server_setup
from changedetectionio import html_tools
@@ -9,7 +11,7 @@ def test_setup(live_server):
# Unit test of the stripper
# Always we are dealing in utf-8
def test_strip_regex_text_func():
from ..processors import text_json_diff as fetch_site_status
from changedetectionio import fetch_site_status
test_content = """
but sometimes we want to remove the lines.

View File

@@ -11,8 +11,7 @@ def test_setup(live_server):
# Unit test of the stripper
# Always we are dealing in utf-8
def test_strip_text_func():
from ..processors import text_json_diff as fetch_site_status
from changedetectionio import fetch_site_status
test_content = """
Some content
@@ -34,10 +33,10 @@ def test_strip_text_func():
def set_original_ignore_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>
@@ -50,10 +49,10 @@ def set_original_ignore_response():
def set_modified_original_ignore_response():
test_return_data = """<html>
<body>
Some NEW nice initial text<br>
Some NEW nice initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<p>new ignore stuff</p>
<p>blah</p>
</body>
@@ -69,11 +68,11 @@ def set_modified_original_ignore_response():
def set_modified_ignore_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<P>ZZZZz</P>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>

View File

@@ -12,10 +12,10 @@ def test_setup(live_server):
def set_original_ignore_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<a href="/original_link"> Some More Text </a>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>
"""
@@ -29,10 +29,10 @@ def set_original_ignore_response():
def set_modified_ignore_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<a href="/modified_link"> Some More Text </a>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>
"""

View File

@@ -12,10 +12,10 @@ def test_setup(live_server):
def set_original_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>
"""
@@ -27,10 +27,10 @@ def set_original_response():
def set_some_changed_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines, and a new thing too.</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>
"""

View File

@@ -12,15 +12,15 @@ def test_setup(live_server):
def set_original_ignore_response_but_with_whitespace():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>
Which is across multiple lines</p>
<br>
<br>
</br>
So let's see what happens. <br>
So let's see what happens. </br>
</body>
@@ -34,10 +34,10 @@ def set_original_ignore_response_but_with_whitespace():
def set_original_ignore_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>

View File

@@ -198,8 +198,8 @@ def test_check_json_without_filter(client, live_server):
)
# Should still see '"html": "<b>"'
assert b'&#34;html&#34;: &#34;&lt;b&gt;&#34;' in res.data
assert res.data.count(b'{') >= 2
assert b'&#34;&lt;b&gt;' in res.data
assert res.data.count(b'{\n') >= 2
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
@@ -394,48 +394,6 @@ def check_json_ext_filter(json_filter, client, live_server):
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_ignore_json_order(client, live_server):
# A change in order shouldn't trigger a notification
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write('{"hello" : 123, "world": 123}')
# Add our URL to the import page
test_url = url_for('test_endpoint', content_type="application/json", _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(2)
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write('{"world" : 123, "hello": 123}')
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(2)
res = client.get(url_for("index"))
assert b'unviewed' not in res.data
# Just to be sure it still works
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write('{"world" : 123, "hello": 124}')
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(2)
res = client.get(url_for("index"))
assert b'unviewed' in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_check_jsonpath_ext_filter(client, live_server):
check_json_ext_filter('json:$[?(@.status==Sold)]', client, live_server)

View File

@@ -73,12 +73,16 @@ def test_check_notification(client, live_server):
# We write the PNG to disk, but a JPEG should appear in the notification
# Write the last screenshot png
testimage_png = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII='
# This one is created when we save the screenshot from the webdriver/playwright session (converted from PNG)
testimage_jpg = '/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/wAALCAABAAEBAREA/8QAFAABAAAAAAAAAAAAAAAAAAAACf/EABQQAQAAAAAAAAAAAAAAAAAAAAD/2gAIAQEAAD8AKp//2Q=='
uuid = extract_UUID_from_client(client)
datastore = 'test-datastore'
with open(os.path.join(datastore, str(uuid), 'last-screenshot.png'), 'wb') as f:
f.write(base64.b64decode(testimage_png))
with open(os.path.join(datastore, str(uuid), 'last-screenshot.jpg'), 'wb') as f:
f.write(base64.b64decode(testimage_jpg))
# Go to the edit page, add our ignore text
# Add our URL to the import page
@@ -96,8 +100,6 @@ def test_check_notification(client, live_server):
"Diff URL: {{diff_url}}\n"
"Snapshot: {{current_snapshot}}\n"
"Diff: {{diff}}\n"
"Diff Added: {{diff_added}}\n"
"Diff Removed: {{diff_removed}}\n"
"Diff Full: {{diff_full}}\n"
":-)",
"notification_screenshot": True,
@@ -145,7 +147,7 @@ def test_check_notification(client, live_server):
assert ':-)' in notification_submission
assert "Diff Full: Some initial text" in notification_submission
assert "Diff: (changed) Which is across multiple lines" in notification_submission
assert "(into) which has this one new line" in notification_submission
assert "(into ) which has this one new line" in notification_submission
# Re #342 - check for accidental python byte encoding of non-utf8/string
assert "b'" not in notification_submission
assert re.search('Watch UUID: [0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}', notification_submission, re.IGNORECASE)
@@ -158,12 +160,12 @@ def test_check_notification(client, live_server):
# Check the attachment was added, and that it is a JPEG from the original PNG
notification_submission_object = json.loads(notification_submission)
# We keep PNG screenshots for now
assert notification_submission_object['attachments'][0]['filename'] == 'last-screenshot.png'
assert notification_submission_object['attachments'][0]['filename'] == 'last-screenshot.jpg'
assert len(notification_submission_object['attachments'][0]['base64'])
assert notification_submission_object['attachments'][0]['mimetype'] == 'image/png'
assert notification_submission_object['attachments'][0]['mimetype'] == 'image/jpeg'
jpeg_in_attachment = base64.b64decode(notification_submission_object['attachments'][0]['base64'])
assert b'JFIF' in jpeg_in_attachment
assert testimage_png not in notification_submission
# Assert that the JPEG is readable (didn't get chewed up somewhere)
from PIL import Image
import io
@@ -295,10 +297,7 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
follow_redirects=True
)
assert b'Settings updated' in res.data
client.get(
url_for("form_delete", uuid="all"),
follow_redirects=True
)
# Add a watch and trigger a HTTP POST
test_url = url_for('test_endpoint', _external=True)
res = client.post(

View File

@@ -1,40 +0,0 @@
#!/usr/bin/python3
import time
from flask import url_for
from .util import set_original_response, set_modified_response, live_server_setup
sleep_time_for_fetch_thread = 3
# `subtractive_selectors` should still work in `source:` type requests
def test_fetch_pdf(client, live_server):
import shutil
shutil.copy("tests/test.pdf", "test-datastore/endpoint-test.pdf")
live_server_setup(live_server)
test_url = url_for('test_pdf_endpoint', _external=True)
# Add our URL to the import page
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(sleep_time_for_fetch_thread)
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True
)
assert b'PDF-1.5' not in res.data
assert b'hello world' in res.data
# So we know if the file changes in other ways
import hashlib
md5 = hashlib.md5(open("test-datastore/endpoint-test.pdf", 'rb').read()).hexdigest().upper()
# We should have one
assert len(md5) >0
# And it's going to be in the document
assert b'Document checksum - '+bytes(str(md5).encode('utf-8')) in res.data

View File

@@ -1,39 +0,0 @@
#!/usr/bin/python3
import time
from flask import url_for
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI
def test_rss_and_token(client, live_server):
set_original_response()
live_server_setup(live_server)
# Add our URL to the import page
res = client.post(
url_for("import_page"),
data={"urls": url_for('test_random_content_endpoint', _external=True)},
follow_redirects=True
)
assert b"1 Imported" in res.data
rss_token = extract_rss_token_from_UI(client)
time.sleep(2)
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(2)
# Add our URL to the import page
res = client.get(
url_for("rss", token="bad token", _external=True),
follow_redirects=True
)
assert b"Access denied, bad token" in res.data
res = client.get(
url_for("rss", token=rss_token, _external=True),
follow_redirects=True
)
assert b"Access denied, bad token" not in res.data
assert b"Random content" in res.data

View File

@@ -2,9 +2,11 @@ from flask import url_for
from . util import set_original_response, set_modified_response, live_server_setup
import time
def test_bad_access(client, live_server):
def test_setup(live_server):
live_server_setup(live_server)
def test_file_access(client, live_server):
res = client.post(
url_for("import_page"),
data={"urls": 'https://localhost'},
@@ -17,49 +19,18 @@ def test_bad_access(client, live_server):
res = client.post(
url_for("edit_page", uuid="first"),
data={
"url": 'javascript:alert(document.domain)',
"url": 'file:///etc/passwd',
"tag": "",
"method": "GET",
"fetch_backend": "html_requests",
"body": ""},
follow_redirects=True
)
time.sleep(3)
assert b'Watch protocol is not permitted by SAFE_PROTOCOL_REGEX' in res.data
res = client.post(
url_for("form_quick_watch_add"),
data={"url": ' javascript:alert(123)', "tag": ''},
res = client.get(
url_for("index", uuid="first"),
follow_redirects=True
)
assert b'Watch protocol is not permitted by SAFE_PROTOCOL_REGEX' in res.data
res = client.post(
url_for("form_quick_watch_add"),
data={"url": '%20%20%20javascript:alert(123)%20%20', "tag": ''},
follow_redirects=True
)
assert b'Watch protocol is not permitted by SAFE_PROTOCOL_REGEX' in res.data
res = client.post(
url_for("form_quick_watch_add"),
data={"url": ' source:javascript:alert(document.domain)', "tag": ''},
follow_redirects=True
)
assert b'Watch protocol is not permitted by SAFE_PROTOCOL_REGEX' in res.data
# file:// is permitted by default, but it will be caught by ALLOW_FILE_URI
client.post(
url_for("form_quick_watch_add"),
data={"url": 'file:///tasty/disk/drive', "tag": ''},
follow_redirects=True
)
time.sleep(1)
res = client.get(url_for("index"))
assert b'file:// type access is denied for security reasons.' in res.data
assert b'denied for security reasons' in res.data
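This test expects `javascript:`-style URLs to be refused via SAFE_PROTOCOL_REGEX and `file://` URLs to be gated behind ALLOW_FILE_URI. A minimal sketch of that kind of validation under those assumptions; the regex and helper below are illustrative, not the project's exact implementation:

```python
# Minimal sketch of the protocol checks this test exercises: a permitted-scheme
# regex plus an ALLOW_FILE_URI opt-in for file:// URLs. Illustrative only.
import os
import re

SAFE_PROTOCOL_REGEX = r"^(http|https|ftp|file)://"

def validate_watch_url(url):
    """Return an error message if the URL should be refused, else None."""
    url = url.strip()
    if not re.match(SAFE_PROTOCOL_REGEX, url, re.IGNORECASE):
        return "Watch protocol is not permitted by SAFE_PROTOCOL_REGEX"
    if url.lower().startswith("file://") and not os.getenv("ALLOW_FILE_URI"):
        return "file:// type access is denied for security reasons."
    return None

assert validate_watch_url(" javascript:alert(123)") is not None
assert validate_watch_url("https://localhost") is None
```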

View File

@@ -40,7 +40,7 @@ def test_check_basic_change_detection_functionality_source(client, live_server):
# Force recheck
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
assert b'1 watches queued for rechecking.' in res.data
assert b'1 watches are queued for rechecking.' in res.data
time.sleep(5)
@@ -90,4 +90,4 @@ def test_check_ignore_elements(client, live_server):
)
assert b'foobar-detection' not in res.data
assert b'&lt;br' not in res.data
assert b'&lt;p' in res.data
assert b'&lt;p' in res.data

View File

@@ -8,10 +8,10 @@ from . util import live_server_setup
def set_original_ignore_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>
@@ -24,10 +24,10 @@ def set_original_ignore_response():
def set_modified_original_ignore_response():
test_return_data = """<html>
<body>
Some NEW nice initial text<br>
Some NEW nice initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>
@@ -40,12 +40,12 @@ def set_modified_original_ignore_response():
def set_modified_with_trigger_text_response():
test_return_data = """<html>
<body>
Some NEW nice initial text<br>
Some NEW nice initial text</br>
<p>Which is across multiple lines</p>
<br>
</br>
Add to cart
<br>
So let's see what happens. <br>
<br/>
So let's see what happens. </br>
</body>
</html>
@@ -142,4 +142,4 @@ def test_trigger_functionality(client, live_server):
res = client.get(url_for("preview_page", uuid="first"))
# We should be able to see what we triggered on
assert b'<div class="triggered">Add to cart' in res.data
assert b'<div class="triggered">Add to cart' in res.data

View File

@@ -8,10 +8,10 @@ from . util import live_server_setup
def set_original_ignore_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>
@@ -72,7 +72,7 @@ def test_trigger_regex_functionality(client, live_server):
assert b'unviewed' not in res.data
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write("regex test123<br>\nsomething 123")
f.write("regex test123<br/>\nsomething 123")
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(sleep_time_for_fetch_thread)
@@ -81,4 +81,4 @@ def test_trigger_regex_functionality(client, live_server):
# Cleanup everything
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
assert b'Deleted' in res.data

View File

@@ -8,10 +8,10 @@ from . util import live_server_setup
def set_original_ignore_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>

View File

@@ -94,6 +94,7 @@ def test_unique_lines_functionality(client, live_server):
res = client.get(url_for("index"))
assert b'unviewed' not in res.data
# Now set the content which contains the new text and re-ordered existing text
set_modified_with_trigger_text_response()
client.get(url_for("form_watch_checknow"), follow_redirects=True)

View File

@@ -12,10 +12,10 @@ def test_setup(live_server):
def set_original_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<div class="sametext">Some text thats the same</div>
<div class="changetext">Some text that will change</div>
</body>
@@ -29,10 +29,10 @@ def set_original_response():
def set_modified_response():
test_return_data = """<html>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. THIS CHANGES AND SHOULDNT TRIGGER A CHANGE<br>
</br>
So let's see what happens. THIS CHANGES AND SHOULDNT TRIGGER A CHANGE</br>
<div class="sametext">Some text thats the same</div>
<div class="changetext">Some new text</div>
</body>

View File

@@ -13,51 +13,18 @@ class TestDiffBuilder(unittest.TestCase):
def test_expected_diff_output(self):
base_dir = os.path.dirname(__file__)
with open(base_dir + "/test-content/before.txt", 'r') as f:
previous_version_file_contents = f.read()
with open(base_dir + "/test-content/after.txt", 'r') as f:
newest_version_file_contents = f.read()
output = diff.render_diff(previous_version_file_contents=previous_version_file_contents,
newest_version_file_contents=newest_version_file_contents)
output = diff.render_diff(previous_file=base_dir + "/test-content/before.txt", newest_file=base_dir + "/test-content/after.txt")
output = output.split("\n")
self.assertIn('(changed) ok', output)
self.assertIn('(into) xok', output)
self.assertIn('(into) next-x-ok', output)
self.assertIn('(added) and something new', output)
self.assertIn('(into ) xok', output)
self.assertIn('(into ) next-x-ok', output)
self.assertIn('(added ) and something new', output)
with open(base_dir + "/test-content/after-2.txt", 'r') as f:
newest_version_file_contents = f.read()
output = diff.render_diff(previous_version_file_contents, newest_version_file_contents)
output = diff.render_diff(previous_file=base_dir + "/test-content/before.txt", newest_file=base_dir + "/test-content/after-2.txt")
output = output.split("\n")
self.assertIn('(removed) for having learned computerese,', output)
self.assertIn('(removed) I continue to examine bits, bytes and words', output)
#diff_removed
with open(base_dir + "/test-content/before.txt", 'r') as f:
previous_version_file_contents = f.read()
with open(base_dir + "/test-content/after.txt", 'r') as f:
newest_version_file_contents = f.read()
output = diff.render_diff(previous_version_file_contents, newest_version_file_contents, include_equal=False, include_removed=True, include_added=False)
output = output.split("\n")
self.assertIn('(changed) ok', output)
self.assertIn('(into) xok', output)
self.assertIn('(into) next-x-ok', output)
self.assertNotIn('(added) and something new', output)
#diff_removed
with open(base_dir + "/test-content/after-2.txt", 'r') as f:
newest_version_file_contents = f.read()
output = diff.render_diff(previous_version_file_contents, newest_version_file_contents, include_equal=False, include_removed=True, include_added=False)
output = output.split("\n")
self.assertIn('(removed) for having learned computerese,', output)
self.assertIn('(removed) I continue to examine bits, bytes and words', output)
# @todo test blocks of changed, blocks of added, blocks of removed
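This unit-test hunk switches between a file-path based call and a content-based call to `diff.render_diff`, with `include_equal` / `include_removed` / `include_added` switches. A minimal sketch of what a function with that keyword interface can look like on top of `difflib`; it mirrors the `(changed)` / `(into)` / `(added)` / `(removed)` prefixes the test asserts on, but it is not the real `changedetectionio.diff` module:

```python
# Minimal sketch of a render_diff() with the keyword interface used in the
# unit test above, built on difflib. Illustrative only.
import difflib

def render_diff(previous_version_file_contents, newest_version_file_contents,
                include_equal=False, include_removed=True, include_added=True,
                line_feed_sep="\n"):
    previous = previous_version_file_contents.splitlines()
    newest = newest_version_file_contents.splitlines()
    output = []
    for tag, i1, i2, j1, j2 in difflib.SequenceMatcher(None, previous, newest).get_opcodes():
        if tag == 'equal' and include_equal:
            output.extend(previous[i1:i2])
        elif tag == 'replace':
            output.extend("(changed) " + line for line in previous[i1:i2])
            output.extend("(into) " + line for line in newest[j1:j2])
        elif tag == 'delete' and include_removed:
            output.extend("(removed) " + line for line in previous[i1:i2])
        elif tag == 'insert' and include_added:
            output.extend("(added) " + line for line in newest[j1:j2])
    return line_feed_sep.join(output)
```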

View File

@@ -9,10 +9,10 @@ def set_original_response():
test_return_data = """<html>
<head><title>head title</title></head>
<body>
Some initial text<br>
Some initial text</br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
<span class="foobar-detection" style='display:none'></span>
</body>
</html>
@@ -26,10 +26,10 @@ def set_modified_response():
test_return_data = """<html>
<head><title>modified head title</title></head>
<body>
Some initial text<br>
Some initial text</br>
<p>which has this one new line</p>
<br>
So let's see what happens. <br>
</br>
So let's see what happens. </br>
</body>
</html>
"""
@@ -43,11 +43,11 @@ def set_more_modified_response():
test_return_data = """<html>
<head><title>modified head title</title></head>
<body>
Some initial text<br>
Some initial text</br>
<p>which has this one new line</p>
<br>
So let's see what happens. <br>
Ohh yeah awesome<br>
</br>
So let's see what happens. </br>
Ohh yeah awesome<br/>
</body>
</html>
"""
@@ -70,15 +70,6 @@ def extract_api_key_from_UI(client):
api_key = m.group(1)
return api_key.strip()
# kinda funky, but works for now
def extract_rss_token_from_UI(client):
import re
res = client.get(
url_for("index"),
)
m = re.search('token=(.+?)"', str(res.data))
token_key = m.group(1)
return token_key.strip()
# kinda funky, but works for now
def extract_UUID_from_client(client):
@@ -107,12 +98,6 @@ def wait_for_all_checks(client):
def live_server_setup(live_server):
@live_server.app.route('/test-random-content-endpoint')
def test_random_content_endpoint():
import secrets
return "Random content - {}\n".format(secrets.token_hex(64))
@live_server.app.route('/test-endpoint')
def test_endpoint():
ctype = request.args.get('content_type')
@@ -183,15 +168,5 @@ def live_server_setup(live_server):
def test_return_query():
return request.query_string
@live_server.app.route('/endpoint-test.pdf')
def test_pdf_endpoint():
# Tried using a global var here but didn't seem to work, so reading from a file instead.
with open("test-datastore/endpoint-test.pdf", "rb") as f:
resp = make_response(f.read(), 200)
resp.headers['Content-Type'] = 'application/pdf'
return resp
live_server.start()

View File

@@ -52,12 +52,3 @@ def test_visual_selector_content_ready(client, live_server):
# Open it and see if it roughly looks correct
with open(os.path.join('test-datastore', uuid, 'elements.json'), 'r') as f:
json.load(f)
# Some options should be enabled
# @todo - in the future, the visibility should be toggled by JS from the request type setting
res = client.get(
url_for("edit_page", uuid="first"),
follow_redirects=True
)
assert b'notification_screenshot' in res.data

View File

@@ -4,8 +4,8 @@ import queue
import time
from changedetectionio import content_fetcher
from .processors.text_json_diff import FilterNotFoundInResponse
from changedetectionio import queuedWatchMetaData
from changedetectionio.fetch_site_status import FilterNotFoundInResponse
# A single update worker
#
@@ -65,32 +65,20 @@ class update_worker(threading.Thread):
if 'notification_urls' in n_object and n_object['notification_urls']:
# HTML needs linebreak, but MarkDown and Text can use a linefeed
if n_object['notification_format'] == 'HTML':
line_feed_sep = "<br>"
line_feed_sep = "</br>"
else:
line_feed_sep = "\n"
# Add text that was triggered
snapshot_contents = watch.get_history_snapshot(dates[-1])
trigger_text = watch.get('trigger_text', [])
triggered_text = ''
if len(trigger_text):
from . import html_tools
triggered_text = html_tools.get_triggered_text(content=snapshot_contents, trigger_text=trigger_text)
if triggered_text:
triggered_text = line_feed_sep.join(triggered_text)
with open(watch_history[dates[-1]], 'rb') as f:
snapshot_contents = f.read()
n_object.update({
'current_snapshot': snapshot_contents,
'diff': diff.render_diff(watch.get_history_snapshot(dates[-2]), watch.get_history_snapshot(dates[-1]), line_feed_sep=line_feed_sep),
'diff_added': diff.render_diff(watch.get_history_snapshot(dates[-2]), watch.get_history_snapshot(dates[-1]), include_removed=False, line_feed_sep=line_feed_sep),
'diff_full': diff.render_diff(watch.get_history_snapshot(dates[-2]), watch.get_history_snapshot(dates[-1]), include_equal=True, line_feed_sep=line_feed_sep),
'diff_removed': diff.render_diff(watch.get_history_snapshot(dates[-2]), watch.get_history_snapshot(dates[-1]), include_added=False, line_feed_sep=line_feed_sep),
'screenshot': watch.get_screenshot() if watch.get('notification_screenshot') else None,
'triggered_text': triggered_text,
'uuid': watch_uuid,
'watch_url': watch['url'],
'uuid': watch_uuid,
'screenshot': watch.get_screenshot_as_jpeg() if watch.get('notification_screenshot') else None,
'current_snapshot': snapshot_contents.decode('utf-8'),
'diff': diff.render_diff(watch_history[dates[-2]], watch_history[dates[-1]], line_feed_sep=line_feed_sep),
'diff_full': diff.render_diff(watch_history[dates[-2]], watch_history[dates[-1]], True, line_feed_sep=line_feed_sep)
})
logging.info (">> SENDING NOTIFICATION")
self.notification_q.put(n_object)
@@ -105,7 +93,7 @@ class update_worker(threading.Thread):
return
n_object = {'notification_title': 'Changedetection.io - Alert - CSS/xPath filter was not present in the page',
'notification_body': "Your configured CSS/xPath filters of '{}' for {{{{watch_url}}}} did not appear on the page after {} attempts, did the page change layout?\n\nLink: {{{{base_url}}}}/edit/{{{{watch_uuid}}}}\n\nThanks - Your omniscient changedetection.io installation :)\n".format(
'notification_body': "Your configured CSS/xPath filters of '{}' for {{watch_url}} did not appear on the page after {} attempts, did the page change layout?\n\nLink: {{base_url}}/edit/{{watch_uuid}}\n\nThanks - Your omniscient changedetection.io installation :)\n".format(
", ".join(watch['include_filters']),
threshold),
'notification_format': 'text'}
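One side of this hunk writes `{{watch_url}}` directly inside the `str.format()` call, the other writes `{{{{watch_url}}}}`. Since `format()` collapses doubled braces into literal ones, only the quadruple-brace form leaves a literal `{{watch_url}}` token behind for the notification layer to substitute later, for example:

```python
# str.format() turns "{{" into a literal "{", so four braces are needed to keep
# a "{{token}}" placeholder intact for later template substitution.
body = "filters of '{}' for {{{{watch_url}}}} did not appear after {} attempts".format(
    "div.price", 6)
print(body)
# -> filters of 'div.price' for {{watch_url}} did not appear after 6 attempts

broken = "filters of '{}' for {{watch_url}} did not appear after {} attempts".format(
    "div.price", 6)
print(broken)
# -> filters of 'div.price' for {watch_url} did not appear after 6 attempts
```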
@@ -163,8 +151,9 @@ class update_worker(threading.Thread):
os.unlink(full_path)
def run(self):
from changedetectionio import fetch_site_status
from .processors import text_json_diff, restock_diff
update_handler = fetch_site_status.perform_site_check(datastore=self.datastore)
while not self.app.config.exit.is_set():
@@ -180,22 +169,14 @@ class update_worker(threading.Thread):
if uuid in list(self.datastore.data['watching'].keys()):
changed_detected = False
contents = b''
screenshot = False
update_obj= {}
xpath_data = False
process_changedetection_results = True
update_obj = {}
print("> Processing UUID {} Priority {} URL {}".format(uuid, queued_item_data.priority,
self.datastore.data['watching'][uuid]['url']))
print("> Processing UUID {} Priority {} URL {}".format(uuid, queued_item_data.priority, self.datastore.data['watching'][uuid]['url']))
now = time.time()
try:
processor = self.datastore.data['watching'][uuid].get('processor','text_json_diff')
# @todo some way to switch by name
if processor == 'restock_diff':
update_handler = restock_diff.perform_site_check(datastore=self.datastore)
else:
# Used as a default and also by some tests
update_handler = text_json_diff.perform_site_check(datastore=self.datastore)
changed_detected, update_obj, contents = update_handler.run(uuid, skip_when_checksum_same=queued_item_data.item.get('skip_when_checksum_same'))
# Re #342
# In Python 3, all strings are sequences of Unicode characters. There is a bytes type that holds raw bytes.
@@ -231,7 +212,9 @@ class update_worker(threading.Thread):
if e.page_text:
self.datastore.save_error_text(watch_uuid=uuid, contents=e.page_text)
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text})
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
# So that we get a trigger when the content is added again
'previous_md5': ''})
process_changedetection_results = False
except FilterNotFoundInResponse as e:
@@ -239,7 +222,9 @@ class update_worker(threading.Thread):
continue
err_text = "Warning, no filters were found, no change detection ran."
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text})
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
# So that we get a trigger when the content is added again
'previous_md5': ''})
# Only when enabled, send the notification
if self.datastore.data['watching'][uuid].get('filter_failure_notification_send', False):
@@ -256,12 +241,11 @@ class update_worker(threading.Thread):
self.datastore.update_watch(uuid=uuid, update_obj={'consecutive_filter_failures': c})
process_changedetection_results = False
process_changedetection_results = True
except content_fetcher.checksumFromPreviousCheckWasTheSame as e:
# Yes fine, so nothing todo, don't continue to process.
process_changedetection_results = False
changed_detected = False
# Yes fine, so nothing todo
pass
except content_fetcher.BrowserStepsStepTimout as e:
@@ -269,7 +253,9 @@ class update_worker(threading.Thread):
continue
err_text = "Warning, browser step at position {} could not run, target not found, check the watch, add a delay if necessary.".format(e.step_n+1)
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text})
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
# So that we get a trigger when the content is added again
'previous_md5': ''})
if self.datastore.data['watching'][uuid].get('filter_failure_notification_send', False):
@@ -285,7 +271,6 @@ class update_worker(threading.Thread):
c = 0
self.datastore.update_watch(uuid=uuid, update_obj={'consecutive_filter_failures': c})
process_changedetection_results = False
except content_fetcher.EmptyReply as e:
@@ -293,7 +278,6 @@ class update_worker(threading.Thread):
err_text = "EmptyReply - try increasing 'Wait seconds before extracting text', Status Code {}".format(e.status_code)
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
'last_check_status': e.status_code})
process_changedetection_results = False
except content_fetcher.ScreenshotUnavailable as e:
err_text = "Screenshot unavailable, page did not render fully in the expected time - try increasing 'Wait seconds before extracting text'"
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
@@ -305,7 +289,6 @@ class update_worker(threading.Thread):
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot, as_error=True)
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
'last_check_status': e.status_code})
process_changedetection_results = False
except content_fetcher.PageUnloadable as e:
err_text = "Page request from server didnt respond correctly"
if e.message:
@@ -316,7 +299,6 @@ class update_worker(threading.Thread):
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
'last_check_status': e.status_code})
process_changedetection_results = False
except Exception as e:
self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
@@ -333,18 +315,18 @@ class update_worker(threading.Thread):
self.cleanup_error_artifacts(uuid)
#
# Different exceptions mean that we may or may not want to bump the snapshot, trigger notifications etc
if process_changedetection_results:
try:
watch = self.datastore.data['watching'].get(uuid)
self.datastore.update_watch(uuid=uuid, update_obj=update_obj)
watch = self.datastore.data['watching'][uuid]
fname = "" # Saved history text filename
# Also save the snapshot on the first time checked
# For the FIRST time we check a site, or a change detected, save the snapshot.
if changed_detected or not watch['last_checked']:
watch.save_history_text(contents=contents,
timestamp=str(round(time.time())),
snapshot_id=update_obj.get('previous_md5', 'none'))
# A change was detected
watch.save_history_text(contents=contents, timestamp=str(round(time.time())))
self.datastore.update_watch(uuid=uuid, update_obj=update_obj)
# A change was detected
if changed_detected:

View File

@@ -1,4 +1,4 @@
version: '3.2'
version: '2'
services:
changedetection:
image: ghcr.io/dgtlmoon/changedetection.io
@@ -41,6 +41,7 @@ services:
#
# Base URL of your changedetection.io install (Added to the notification alert)
# - BASE_URL=https://mysite.com
# Respect proxy_pass type settings, `proxy_set_header Host "localhost";` and `proxy_set_header X-Forwarded-Prefix /app;`
# More here https://github.com/dgtlmoon/changedetection.io/wiki/Running-changedetection.io-behind-a-reverse-proxy-sub-directory
# - USE_X_SETTINGS=1
@@ -94,10 +95,7 @@ services:
# - CHROME_REFRESH_TIME=600000
# - DEFAULT_BLOCK_ADS=true
# - DEFAULT_STEALTH=true
#
# Ignore HTTPS errors, like for self-signed certs
# - DEFAULT_IGNORE_HTTPS_ERRORS=true
#
volumes:
changedetection-data:

docs/.gitignore
View File

@@ -1,2 +0,0 @@
package-lock.json
node_modules

View File

@@ -1,8 +0,0 @@
Directory of docs
To regenerate API docs
Run from this directory.
`node_modules/apidoc/bin/apidoc -i ../changedetectionio/api/ -o api_v1`

Binary file not shown (before: 7.5 KiB)

Binary file not shown (before: 28 KiB)

Binary file not shown (before: 8.6 KiB)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown (before: 566 B)

Some files were not shown because too many files have changed in this diff