Compare commits


2 Commits

Author     SHA1         Message                                                             Date
dgtlmoon   df2c58d1ff   Not needed                                                          2022-06-11 22:55:00 +02:00
dgtlmoon   d29e0eea47   Also log normal notification activity, make logs easier to find     2022-06-11 22:52:52 +02:00
79 changed files with 4606 additions and 3503 deletions

View File

@@ -7,20 +7,6 @@ assignees: 'dgtlmoon'
---
**DO NOT USE THIS FORM TO REPORT THAT A PARTICULAR WEBSITE IS NOT SCRAPING/WATCHING AS EXPECTED**
This form is only for direct bugs and feature requests to do directly with the software.
Please report watched websites (full URL and _any_ settings) that do not work with changedetection.io as expected [**IN THE DISCUSSION FORUMS**](https://github.com/dgtlmoon/changedetection.io/discussions) or your report will be deleted
CONSIDER TAKING OUT A SUBSCRIPTION FOR A SMALL PRICE PER MONTH, YOU GET THE BENEFIT OF USING OUR PAID PROXIES AND FURTHERING THE DEVELOPMENT OF CHANGEDETECTION.IO
THANK YOU
**Describe the bug**
A clear and concise description of what the bug is.

View File

@@ -85,8 +85,8 @@ jobs:
version: latest
driver-opts: image=moby/buildkit:master
# master branch -> :dev container tag
- name: Build and push :dev
# master always builds :latest
- name: Build and push :latest
id: docker_build
if: ${{ github.ref }} == "refs/heads/master"
uses: docker/build-push-action@v2
@@ -95,12 +95,12 @@ jobs:
file: ./Dockerfile
push: true
tags: |
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:dev,ghcr.io/${{ github.repository }}:dev
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest,ghcr.io/${{ github.repository }}:latest
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
# A new tagged release is required, which builds :tag and :latest
# A new tagged release is required, which builds :tag
- name: Build and push :tag
id: docker_build_tag_release
if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.')
@@ -110,10 +110,7 @@ jobs:
file: ./Dockerfile
push: true
tags: |
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:${{ github.event.release.tag_name }}
ghcr.io/dgtlmoon/changedetection.io:${{ github.event.release.tag_name }}
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest
ghcr.io/dgtlmoon/changedetection.io:latest
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:${{ github.event.release.tag_name }},ghcr.io/dgtlmoon/changedetection.io:${{ github.event.release.tag_name }}
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
@@ -128,3 +125,5 @@ jobs:
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-

View File

@@ -1,55 +0,0 @@
name: ChangeDetection.io Container Build Test
# Triggers the workflow on push or pull request events
# This line doesn't work, even though it is the documented one
#on: [push, pull_request]
on:
push:
paths:
- requirements.txt
- Dockerfile
pull_request:
paths:
- requirements.txt
- Dockerfile
# Changes to requirements.txt packages and Dockerfile may or may not always be compatible with arm etc, so worth testing
# @todo: some kind of path filter for requirements.txt and Dockerfile
jobs:
test-container-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: 3.9
# Just test that the build works, some libraries won't compile on ARM/rPi etc
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
with:
image: tonistiigi/binfmt:latest
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
with:
install: true
version: latest
driver-opts: image=moby/buildkit:master
- name: Test that the docker containers can build
id: docker_build
uses: docker/build-push-action@v2
# https://github.com/docker/build-push-action#customizing
with:
context: ./
file: ./Dockerfile
platforms: linux/arm/v7,linux/arm/v6,linux/amd64,linux/arm64,
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache

View File

@@ -1,25 +1,28 @@
name: ChangeDetection.io App Test
name: ChangeDetection.io Test
# Triggers the workflow on push or pull request events
on: [push, pull_request]
jobs:
test-application:
test-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: 3.9
- name: Show env vars
run: set
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install flake8 pytest
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
@@ -36,4 +39,7 @@ jobs:
# Each test is totally isolated and performs its own cleanup/reset
cd changedetectionio; ./run_all_tests.sh
# https://github.com/docker/build-push-action/blob/master/docs/advanced/test-before-push.md ?
# https://github.com/docker/buildx/issues/59 ? Needs to be one platform?
# https://github.com/docker/buildx/issues/495#issuecomment-918925854

View File

@@ -6,7 +6,7 @@ Otherwise, it's always best to PR into the `dev` branch.
Please be sure that all new functionality has a matching test!
Use `pytest` to validate/test, you can run the existing tests as `pytest tests/test_notification.py` for example
Use `pytest` to validate/test, you can run the existing tests as `pytest tests/test_notifications.py` for example
```
pip3 install -r requirements-dev

View File

@@ -5,14 +5,13 @@ FROM python:3.8-slim as builder
ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1
RUN apt-get update && apt-get install -y --no-install-recommends \
g++ \
libssl-dev \
libffi-dev \
gcc \
libc-dev \
libffi-dev \
libssl-dev \
libxslt-dev \
make \
zlib1g-dev
zlib1g-dev \
g++
RUN mkdir /install
WORKDIR /install
@@ -23,14 +22,9 @@ RUN pip install --target=/dependencies -r /requirements.txt
# Playwright is an alternative to Selenium
# Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing
RUN pip install --target=/dependencies playwright~=1.26 \
RUN pip install --target=/dependencies playwright~=1.20 \
|| echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."
RUN pip install --target=/dependencies jq~=1.3 \
|| echo "WARN: Failed to install JQ. The application can still run, but the Jq: filter option will be disabled."
# Final image stage
FROM python:3.8-slim
@@ -64,7 +58,6 @@ EXPOSE 5000
# The actual flask app
COPY changedetectionio /app/changedetectionio
# The eventlet server wrapper
COPY changedetection.py /app/changedetection.py

View File

@@ -2,7 +2,6 @@ recursive-include changedetectionio/api *
recursive-include changedetectionio/templates *
recursive-include changedetectionio/static *
recursive-include changedetectionio/model *
recursive-include changedetectionio/tests *
include changedetection.py
global-exclude *.pyc
global-exclude node_modules

View File

@@ -1,48 +1,45 @@
## Web Site Change Detection, Monitoring and Notification.
# changedetection.io
![changedetection.io](https://github.com/dgtlmoon/changedetection.io/actions/workflows/test-only.yml/badge.svg?branch=master)
<a href="https://hub.docker.com/r/dgtlmoon/changedetection.io" target="_blank" title="Change detection docker hub">
<img src="https://img.shields.io/docker/pulls/dgtlmoon/changedetection.io" alt="Docker Pulls"/>
</a>
<a href="https://hub.docker.com/r/dgtlmoon/changedetection.io" target="_blank" title="Change detection docker hub">
<img src="https://img.shields.io/github/v/release/dgtlmoon/changedetection.io" alt="Change detection latest tag version"/>
</a>
Live your data-life pro-actively, track website content changes and receive notifications via Discord, Email, Slack, Telegram and 70+ more
## Self-hosted open source change monitoring of web pages.
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start?src=pip)
_Know when web pages change! Stay on top of new information!_
Live your data-life *pro-actively* instead of *re-actively*, do not rely on manipulative social media for consuming important information.
[**Don't have time? Let us host it for you! Try our extremely affordable subscription, use our proxies and support!**](https://lemonade.changedetection.io/start)
<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />
**Get your own private instance now! Let us host it for you!**
[**Try our $6.99/month subscription - unlimited checks, watches and notifications!**](https://lemonade.changedetection.io/start), choose from different geographical locations, let us handle everything for you.
#### Example use cases
- Products and services have a change in pricing
- _Out of stock notification_ and _Back In stock notification_
- Governmental department updates (changes are often only on their websites)
Know when ...
- Government department updates (changes are often only on their websites)
- Local government news (changes are often only on their websites)
- New software releases, security advisories when you're not on their mailing list.
- Festivals with changes
- Real estate listing changes
- Know when your favourite whiskey is on sale, or other special deals are announced before anyone else
- COVID related news from government websites
- University/organisation news from their website
- Detect and monitor changes in JSON API responses
- JSON API monitoring and alerting
- Changes in legal and other documents
- Trigger API calls via notifications when text appears on a website
- Glue together APIs using the JSON filter and JSON notifications
- Create RSS feeds based on changes in web content
- Monitor HTML source code for unexpected changes, strengthen your PCI compliance
- You have a very sensitive list of URLs to watch and you do _not_ want to use the paid alternatives. (Remember, _you_ are the product)
_Need an actual Chrome runner with Javascript support? We support fetching via WebDriver and Playwright!</a>_
#### Key Features
- Lots of trigger filters, such as "Trigger on text", "Remove text by selector", "Ignore text", "Extract text", also using regular-expressions!
- Target elements with xPath and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
- Switch between fast non-JS and Chrome JS based "fetchers"
- Easily specify how often a site should be checked
- Execute JS before extracting text (Good for logging in, see examples in the UI!)
- Override Request Headers, Specify `POST` or `GET` and other methods
- Use the "Visual Selector" to help target specific elements
- API monitoring and alerting
**Get monitoring now!**
```bash
$ pip3 install changedetection.io
$ pip3 install changedetection.io
```
Specify a target for the *datastore path* with `-d` (required) and a *listening port* with `-p` (defaults to `5000`)
@@ -54,5 +51,17 @@ $ changedetection.io -d /path/to/empty/data/dir -p 5000
Then visit http://127.0.0.1:5000 , You should now be able to access the UI.
### Features
- Website monitoring
- Change detection of content and analyses
- Filters on change (Select by CSS or JSON)
- Triggers (Wait for text, wait for regex)
- Notification support
- JSON API Monitoring
- Parse JSON embedded in HTML
- (Reverse) Proxy support
- Javascript support via WebDriver
- Raspberry Pi (arm v6/v7/64 support)
See https://github.com/dgtlmoon/changedetection.io for more information.

View File

@@ -1,36 +1,38 @@
## Web Site Change Detection, Monitoring and Notification.
Live your data-life pro-actively, track website content changes and receive notifications via Discord, Email, Slack, Telegram and 70+ more
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start?src=github)
# changedetection.io
[![Release Version][release-shield]][release-link] [![Docker Pulls][docker-pulls]][docker-link] [![License][license-shield]](LICENSE.md)
![changedetection.io](https://github.com/dgtlmoon/changedetection.io/actions/workflows/test-only.yml/badge.svg?branch=master)
Know when important content changes, we support notifications via Discord, Telegram, Home-Assistant, Slack, Email and 70+ more
## Self-Hosted, Open Source, Change Monitoring of Web Pages
[**Don't have time? Let us host it for you! try our $6.99/month subscription - use our proxies and support!**](https://lemonade.changedetection.io/start) , _half the price of other website change monitoring services and comes with unlimited watches & checks!_
_Know when web pages change! Stay on top of new information!_
- Chrome browser included.
- Super fast, no registration needed setup.
- Start watching and receiving change notifications instantly.
Live your data-life *pro-actively* instead of *re-actively*.
Free, Open-source web page monitoring, notification and change detection. Don't have time? [**Try our $6.99/month subscription - unlimited checks and watches!**](https://lemonade.changedetection.io/start)
Easily see what changed, examine by word, line, or individual character.
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start)
<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />
**Get your own private instance now! Let us host it for you!**
[**Try our $6.99/month subscription - unlimited checks and watches!**](https://lemonade.changedetection.io/start) , _half the price of other website change monitoring services and comes with unlimited watches & checks!_
- Automatic Updates, Automatic Backups, No Heroku "paused application", don't miss a change!
- Javascript browser included
- Unlimited checks and watches!
#### Example use cases
- Products and services have a change in pricing
- _Out of stock notification_ and _Back In stock notification_
- Governmental department updates (changes are often only on their websites)
- New software releases, security advisories when you're not on their mailing list.
- Festivals with changes
- Real estate listing changes
- Know when your favourite whiskey is on sale, or other special deals are announced before anyone else
- COVID related news from government websites
- University/organisation news from their website
- Detect and monitor changes in JSON API responses
@@ -42,28 +44,21 @@ Easily see what changed, examine by word, line, or individual character.
- Monitor HTML source code for unexpected changes, strengthen your PCI compliance
- You have a very sensitive list of URLs to watch and you do _not_ want to use the paid alternatives. (Remember, _you_ are the product)
_Need an actual Chrome runner with Javascript support? We support fetching via WebDriver and Playwright!</a>_
#### Key Features
- Lots of trigger filters, such as "Trigger on text", "Remove text by selector", "Ignore text", "Extract text", also using regular-expressions!
- Target elements with xPath and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
- Switch between fast non-JS and Chrome JS based "fetchers"
- Easily specify how often a site should be checked
- Execute JS before extracting text (Good for logging in, see examples in the UI!)
- Override Request Headers, Specify `POST` or `GET` and other methods
- Use the "Visual Selector" to help target specific elements
- Configurable [proxy per watch](https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration)
We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $100 using our signup link.
_Need an actual Chrome runner with Javascript support? We support fetching via WebDriver!</a>_
## Screenshots
### Examine differences in content.
Easily see what changed, examine by word, line, or individual character.
<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />
Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/
### Filter by elements using the Visual Selector tool.
### Target elements with the Visual Selector tool.
Available when connected to a <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Playwright-content-fetcher">playwright content fetcher</a> (included as part of our subscription service)
Available when connected to a <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Playwright-content-fetcher">playwright content fetcher</a> (available also as part of our subscription service)
<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/visualselector-anim.gif" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />
@@ -72,18 +67,14 @@ Available when connected to a <a href="https://github.com/dgtlmoon/changedetecti
### Docker
With Docker Compose, just clone this repository and..
```bash
$ docker-compose up -d
```
Docker standalone
```bash
$ docker run -d --restart always -p "127.0.0.1:5000:5000" -v datastore-volume:/datastore --name changedetection.io dgtlmoon/changedetection.io
```
`:latest` tag is our latest stable release, `:dev` tag is our bleeding edge `master` branch.
### Windows
See the install instructions at the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Microsoft-Windows
@@ -121,9 +112,9 @@ See the wiki for more information https://github.com/dgtlmoon/changedetection.io
## Filters
XPath, JSONPath and CSS support comes baked in! You can be as specific as you need, use XPath exported from various XPath element query creation tools.
XPath, JSONPath, jq, and CSS support comes baked in! You can be as specific as you need, use XPath exported from various XPath element query creation tools.
(We support LXML `re:test`, `re:math` and `re:replace`.)
(We support LXML re:test, re:math and re:replace.)
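As a rough illustration of what those EXSLT regular-expression functions do, here is a minimal sketch using plain `lxml`; the markup, pattern, and names below are invented for the example, and the project wraps this behind its own filter syntax:
```python
# Minimal sketch of the EXSLT regexp functions via plain lxml (illustrative only).
from lxml import html

EXSLT_NS = {'re': 'http://exslt.org/regular-expressions'}

doc = html.fromstring(
    "<div><span class='price'>23.50</span><span class='old-price'>29.00</span></div>"
)

# re:test(value, pattern, flags) keeps only nodes whose attribute matches the regex
current_price = doc.xpath(
    "//span[re:test(@class, '^price$', 'i')]/text()", namespaces=EXSLT_NS
)
print(current_price)  # ['23.50']
```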
## Notifications
@@ -151,7 +142,7 @@ Now you can also customise your notification content!
## JSON API Monitoring
Detect changes and monitor data in JSON API's by using either JSONPath or jq to filter, parse, and restructure JSON as needed.
Detect changes and monitor data in JSON API's by using the built-in JSONPath selectors as a filter / selector.
![image](https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/json-filter-field-example.png)
@@ -159,20 +150,9 @@ This will re-parse the JSON and apply formatting to the text, making it super ea
![image](https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/json-diff-example.png)
### JSONPath or jq?
For more complex parsing, filtering, and modifying of JSON data, jq is recommended due to the built-in operators and functions. Refer to the [documentation](https://stedolan.github.io/jq/manual/) for more specific information on jq.
One big advantage of `jq` is that you can use logic in your JSON filter, such as filters to only show items that have a value greater than/less than etc.
See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/JSON-Selector-Filter-help for more information and examples
Note: `jq` library must be added separately (`pip3 install jq`)
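A quick sketch of that advantage using the `jq` Python package mentioned in the note above; the response data and the price threshold are made up for illustration:
```python
# Example of logic in a jq filter - requires `pip3 install jq`.
# The data and the threshold are invented for this example.
import jq

api_response = {
    "items": [
        {"name": "widget", "price": 23.50},
        {"name": "gadget", "price": 9.99},
    ]
}

# Keep only items above a threshold, using jq's comparison operators
expensive = jq.compile('.items[] | select(.price > 10) | .name').input(api_response).all()
print(expensive)  # ['widget']
```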
### Parse JSON embedded in HTML!
When you enable a `json:` or `jq:` filter, you can even automatically extract and parse embedded JSON inside a HTML page! Amazingly handy for sites that build content based on JSON, such as many e-commerce websites.
When you enable a `json:` filter, you can even automatically extract and parse embedded JSON inside a HTML page! Amazingly handy for sites that build content based on JSON, such as many e-commerce websites.
```
<html>
@@ -182,7 +162,7 @@ When you enable a `json:` or `jq:` filter, you can even automatically extract an
</script>
```
`json:$.price` or `jq:.price` would give `23.50`, or you can extract the whole structure
`json:$.price` would give `23.50`, or you can extract the whole structure
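To make the embedded-JSON idea concrete, here is a rough self-contained sketch: pull the JSON out of a `<script>` block and apply a JSONPath expression to it. The `jsonpath_ng` library, the regex-based extraction, and the markup are assumptions for the example, not necessarily what the project uses internally:
```python
# Rough sketch: extract JSON embedded in HTML, then apply a JSONPath expression.
# jsonpath_ng and the regex-based extraction are illustrative assumptions only.
import json
import re
from jsonpath_ng import parse

page = """
<html>
  <script type="application/ld+json">
    {"@type": "Product", "price": 23.50}
  </script>
</html>
"""

match = re.search(r'<script type="application/ld\+json">(.*?)</script>', page, re.S)
data = json.loads(match.group(1))

price = [m.value for m in parse('$.price').find(data)]
print(price)  # [23.5]
```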
## Proxy configuration

View File

@@ -6,36 +6,6 @@
# Read more https://github.com/dgtlmoon/changedetection.io/wiki
from changedetectionio import changedetection
import multiprocessing
import signal
import os
def sigchld_handler(_signo, _stack_frame):
import sys
print('Shutdown: Got SIGCHLD')
# https://stackoverflow.com/questions/40453496/python-multiprocessing-capturing-signals-to-restart-child-processes-or-shut-do
pid, status = os.waitpid(-1, os.WNOHANG | os.WUNTRACED | os.WCONTINUED)
print('Sub-process: pid %d status %d' % (pid, status))
if status != 0:
sys.exit(1)
raise SystemExit
if __name__ == '__main__':
#signal.signal(signal.SIGCHLD, sigchld_handler)
# The only way I could find to get Flask to shutdown, is to wrap it and then rely on the subsystem issuing SIGTERM/SIGKILL
parse_process = multiprocessing.Process(target=changedetection.main)
parse_process.daemon = True
parse_process.start()
import time
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
#parse_process.terminate() not needed, because this process will issue it to the sub-process anyway
print ("Exited - CTRL+C")
changedetection.main()

View File

@@ -1,2 +1 @@
test-datastore
package-lock.json

View File

@@ -1,5 +1,16 @@
#!/usr/bin/python3
# @todo logging
# @todo extra options for url like , verify=False etc.
# @todo enable https://urllib3.readthedocs.io/en/latest/user-guide.html#ssl as option?
# @todo option for interval day/6 hour/etc
# @todo on change detected, config for calling some API
# @todo fetch title into json
# https://distill.io/features
# proxy per check
# - flask_cors, itsdangerous,MarkupSafe
import datetime
import os
import queue
@@ -33,7 +44,7 @@ from flask_wtf import CSRFProtect
from changedetectionio import html_tools
from changedetectionio.api import api_v1
__version__ = '0.39.20.4'
__version__ = '0.39.14'
datastore = None
@@ -43,7 +54,7 @@ ticker_thread = None
extra_stylesheets = []
update_q = queue.PriorityQueue()
update_q = queue.Queue()
notification_q = queue.Queue()
@@ -65,7 +76,7 @@ app.config['LOGIN_DISABLED'] = False
# Disables caching of the templates
app.config['TEMPLATES_AUTO_RELOAD'] = True
app.jinja_env.add_extension('jinja2.ext.loopcontrols')
csrf = CSRFProtect()
csrf.init_app(app)
@@ -97,26 +108,25 @@ def _jinja2_filter_datetime(watch_obj, format="%Y-%m-%d %H:%M:%S"):
# Worker thread tells us which UUID it is currently processing.
for t in running_update_threads:
if t.current_uuid == watch_obj['uuid']:
return '<span class="loader"></span><span> Checking now</span>'
return "Checking now.."
if watch_obj['last_checked'] == 0:
return 'Not yet'
return timeago.format(int(watch_obj['last_checked']), time.time())
# @app.context_processor
# def timeago():
# def _timeago(lower_time, now):
# return timeago.format(lower_time, now)
# return dict(timeago=_timeago)
@app.template_filter('format_timestamp_timeago')
def _jinja2_filter_datetimestamp(timestamp, format="%Y-%m-%d %H:%M:%S"):
if timestamp == False:
return 'Not yet'
return timeago.format(timestamp, time.time())
@app.template_filter('format_seconds_ago')
def _jinja2_filter_seconds_precise(timestamp):
if timestamp == False:
return 'Not yet'
return format(int(time.time()-timestamp), ',d')
# return timeago.format(timestamp, time.time())
# return datetime.datetime.utcfromtimestamp(timestamp).strftime(format)
# When nobody is logged in Flask-Login's current_user is set to an AnonymousUser object.
class User(flask_login.UserMixin):
@@ -194,9 +204,6 @@ def changedetection_app(config=None, datastore_o=None):
watch_api.add_resource(api_v1.Watch, '/api/v1/watch/<string:uuid>',
resource_class_kwargs={'datastore': datastore, 'update_q': update_q})
watch_api.add_resource(api_v1.SystemInfo, '/api/v1/systeminfo',
resource_class_kwargs={'datastore': datastore, 'update_q': update_q})
@@ -291,7 +298,7 @@ def changedetection_app(config=None, datastore_o=None):
# Sort by last_changed and add the uuid which is usually the key..
sorted_watches = []
# @todo needs a .itemsWithTag() or something - then we can use that in Jinaj2 and throw this away
# @todo needs a .itemsWithTag() or something
for uuid, watch in datastore.data['watching'].items():
if limit_tag != None:
@@ -306,7 +313,7 @@ def changedetection_app(config=None, datastore_o=None):
watch['uuid'] = uuid
sorted_watches.append(watch)
sorted_watches.sort(key=lambda x: x.last_changed, reverse=False)
sorted_watches.sort(key=lambda x: x['last_changed'], reverse=True)
fg = FeedGenerator()
fg.title('changedetection.io')
@@ -325,7 +332,7 @@ def changedetection_app(config=None, datastore_o=None):
if not watch.viewed:
# Re #239 - GUID needs to be individual for each event
# @todo In the future make this a configurable link back (see work on BASE_URL https://github.com/dgtlmoon/changedetection.io/pull/228)
guid = "{}/{}".format(watch['uuid'], watch.last_changed)
guid = "{}/{}".format(watch['uuid'], watch['last_changed'])
fe = fg.add_entry()
# Include a link to the diff page, they will have to login here to see if password protection is enabled.
@@ -354,7 +361,7 @@ def changedetection_app(config=None, datastore_o=None):
fe.pubDate(dt)
response = make_response(fg.rss_str())
response.headers.set('Content-Type', 'application/rss+xml;charset=utf-8')
response.headers.set('Content-Type', 'application/rss+xml')
return response
@app.route("/", methods=['GET'])
@@ -363,20 +370,20 @@ def changedetection_app(config=None, datastore_o=None):
from changedetectionio import forms
limit_tag = request.args.get('tag')
pause_uuid = request.args.get('pause')
# Redirect for the old rss path which used the /?rss=true
if request.args.get('rss'):
return redirect(url_for('rss', tag=limit_tag))
op = request.args.get('op')
if op:
uuid = request.args.get('uuid')
if op == 'pause':
datastore.data['watching'][uuid]['paused'] ^= True
elif op == 'mute':
datastore.data['watching'][uuid]['notification_muted'] ^= True
if pause_uuid:
try:
datastore.data['watching'][pause_uuid]['paused'] ^= True
datastore.needs_write = True
datastore.needs_write = True
return redirect(url_for('index', tag = limit_tag))
return redirect(url_for('index', tag = limit_tag))
except KeyError:
pass
# Sort by last_changed and add the uuid which is usually the key..
sorted_watches = []
@@ -396,9 +403,12 @@ def changedetection_app(config=None, datastore_o=None):
watch['uuid'] = uuid
sorted_watches.append(watch)
sorted_watches.sort(key=lambda x: x['last_changed'], reverse=True)
existing_tags = datastore.get_all_tags()
form = forms.quickWatchForm(request.form)
output = render_template("watch-overview.html",
form=form,
watches=sorted_watches,
@@ -409,7 +419,7 @@ def changedetection_app(config=None, datastore_o=None):
# Don't link to hosting when we're on the hosting environment
hosted_sticky=os.getenv("SALTED_PASS", False) == False,
guid=datastore.data['app_guid'],
queued_uuids=[uuid for p,uuid in update_q.queue])
queued_uuids=update_q.queue)
if session.get('share-link'):
@@ -423,9 +433,7 @@ def changedetection_app(config=None, datastore_o=None):
def ajax_callback_send_notification_test():
import apprise
from .apprise_asset import asset
apobj = apprise.Apprise(asset=asset)
apobj = apprise.Apprise()
# validate URLS
if not len(request.form['notification_urls'].strip()):
@@ -451,38 +459,37 @@ def changedetection_app(config=None, datastore_o=None):
return 'OK'
@app.route("/clear_history/<string:uuid>", methods=['GET'])
@app.route("/scrub/<string:uuid>", methods=['GET'])
@login_required
def clear_watch_history(uuid):
def scrub_watch(uuid):
try:
datastore.clear_watch_history(uuid)
datastore.scrub_watch(uuid)
except KeyError:
flash('Watch not found', 'error')
else:
flash("Cleared snapshot history for watch {}".format(uuid))
flash("Scrubbed watch {}".format(uuid))
return redirect(url_for('index'))
@app.route("/clear_history", methods=['GET', 'POST'])
@app.route("/scrub", methods=['GET', 'POST'])
@login_required
def clear_all_history():
def scrub_page():
if request.method == 'POST':
confirmtext = request.form.get('confirmtext')
if confirmtext == 'clear':
if confirmtext == 'scrub':
changes_removed = 0
for uuid in datastore.data['watching'].keys():
datastore.clear_watch_history(uuid)
#TODO: KeyError not checked, as it is above
datastore.scrub_watch(uuid)
flash("Cleared snapshot history for all watches")
flash("Cleared all snapshot history")
else:
flash('Incorrect confirmation text.', 'error')
return redirect(url_for('index'))
output = render_template("clear_all_history.html")
output = render_template("scrub.html")
return output
@@ -495,7 +502,7 @@ def changedetection_app(config=None, datastore_o=None):
from changedetectionio import fetch_site_status
# Get the most recent one
newest_history_key = datastore.data['watching'][uuid].get('newest_history_key')
newest_history_key = datastore.get_val(uuid, 'newest_history_key')
# 0 means that theres only one, so that there should be no 'unviewed' history available
if newest_history_key == 0:
@@ -544,13 +551,16 @@ def changedetection_app(config=None, datastore_o=None):
# be sure we update with a copy instead of accidently editing the live object by reference
default = deepcopy(datastore.data['watching'][uuid])
# Show system wide default if nothing configured
if datastore.data['watching'][uuid]['fetch_backend'] is None:
default['fetch_backend'] = datastore.data['settings']['application']['fetch_backend']
# Show system wide default if nothing configured
if all(value == 0 or value == None for value in datastore.data['watching'][uuid]['time_between_check'].values()):
default['time_between_check'] = deepcopy(datastore.data['settings']['requests']['time_between_check'])
# Defaults for proxy choice
if datastore.proxy_list is not None: # When enabled
# @todo
# Radio needs '' not None, or incase that the chosen one no longer exists
if default['proxy'] is None or not any(default['proxy'] in tup for tup in datastore.proxy_list):
default['proxy'] = ''
@@ -564,17 +574,11 @@ def changedetection_app(config=None, datastore_o=None):
# @todo - Couldn't get setattr() etc dynamic addition working, so remove it instead
del form.proxy
else:
form.proxy.choices = [('', 'Default')]
for p in datastore.proxy_list:
form.proxy.choices.append(tuple((p, datastore.proxy_list[p]['label'])))
form.proxy.choices = [('', 'Default')] + datastore.proxy_list
if request.method == 'POST' and form.validate():
extra_update_obj = {}
if request.args.get('unpause_on_save'):
extra_update_obj['paused'] = False
# Re #110, if they submit the same as the default value, set it to None, so we continue to follow the default
# Assume we use the default value, unless something relevant is different, then use the form value
# values could be None, 0 etc.
@@ -590,8 +594,10 @@ def changedetection_app(config=None, datastore_o=None):
if form.fetch_backend.data == datastore.data['settings']['application']['fetch_backend']:
extra_update_obj['fetch_backend'] = None
# Notification URLs
datastore.data['watching'][uuid]['notification_urls'] = form.notification_urls.data
# Ignore text
# Ignore text
form_ignore_text = form.ignore_text.data
datastore.data['watching'][uuid]['ignore_text'] = form_ignore_text
@@ -612,23 +618,24 @@ def changedetection_app(config=None, datastore_o=None):
datastore.data['watching'][uuid].update(form.data)
datastore.data['watching'][uuid].update(extra_update_obj)
if request.args.get('unpause_on_save'):
flash("Updated watch - unpaused!.")
else:
flash("Updated watch.")
flash("Updated watch.")
# Re #286 - We wait for syncing new data to disk in another thread every 60 seconds
# But in the case something is added we should save straight away
datastore.needs_write_urgent = True
# Queue the watch for immediate recheck, with a higher priority
update_q.put((1, uuid))
# Queue the watch for immediate recheck
update_q.put(uuid)
# Diff page [edit] link should go back to diff page
if request.args.get("next") and request.args.get("next") == 'diff':
if request.args.get("next") and request.args.get("next") == 'diff' and not form.save_and_preview_button.data:
return redirect(url_for('diff_history_page', uuid=uuid))
return redirect(url_for('index'))
else:
if form.save_and_preview_button.data:
flash('You may need to reload this page to see the new content.')
return redirect(url_for('preview_page', uuid=uuid))
else:
return redirect(url_for('index'))
else:
if request.method == 'POST' and not form.validate():
@@ -639,27 +646,17 @@ def changedetection_app(config=None, datastore_o=None):
# Only works reliably with Playwright
visualselector_enabled = os.getenv('PLAYWRIGHT_DRIVER_URL', False) and default['fetch_backend'] == 'html_webdriver'
# JQ is difficult to install on windows and must be manually added (outside requirements.txt)
jq_support = True
try:
import jq
except ModuleNotFoundError:
jq_support = False
output = render_template("edit.html",
uuid=uuid,
watch=datastore.data['watching'][uuid],
form=form,
has_empty_checktime=using_default_check_time,
using_global_webdriver_wait=default['webdriver_delay'] is None,
current_base_url=datastore.data['settings']['application']['base_url'],
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
form=form,
has_default_notification_urls=True if len(datastore.data['settings']['application']['notification_urls']) else False,
has_empty_checktime=using_default_check_time,
jq_support=jq_support,
playwright_enabled=os.getenv('PLAYWRIGHT_DRIVER_URL', False),
settings_application=datastore.data['settings']['application'],
using_global_webdriver_wait=default['webdriver_delay'] is None,
uuid=uuid,
visualselector_data_is_ready=visualselector_data_is_ready,
visualselector_enabled=visualselector_enabled,
watch=datastore.data['watching'][uuid],
visualselector_enabled=visualselector_enabled
)
return output
@@ -671,34 +668,26 @@ def changedetection_app(config=None, datastore_o=None):
default = deepcopy(datastore.data['settings'])
if datastore.proxy_list is not None:
available_proxies = list(datastore.proxy_list.keys())
# When enabled
system_proxy = datastore.data['settings']['requests']['proxy']
# In the case it doesnt exist anymore
if not system_proxy in available_proxies:
if not any([system_proxy in tup for tup in datastore.proxy_list]):
system_proxy = None
default['requests']['proxy'] = system_proxy if system_proxy is not None else available_proxies[0]
default['requests']['proxy'] = system_proxy if system_proxy is not None else datastore.proxy_list[0][0]
# Used by the form handler to keep or remove the proxy settings
default['proxy_list'] = available_proxies[0]
default['proxy_list'] = datastore.proxy_list
# Don't use form.data on POST so that it doesnt overrid the checkbox status from the POST status
form = forms.globalSettingsForm(formdata=request.form if request.method == 'POST' else None,
data=default
)
# Remove the last option 'System default'
form.application.form.notification_format.choices.pop()
if datastore.proxy_list is None:
# @todo - Couldn't get setattr() etc dynamic addition working, so remove it instead
del form.requests.form.proxy
else:
form.requests.form.proxy.choices = []
for p in datastore.proxy_list:
form.requests.form.proxy.choices.append(tuple((p, datastore.proxy_list[p]['label'])))
form.requests.form.proxy.choices = datastore.proxy_list
if request.method == 'POST':
# Password unset is a GET, but we can lock the session to a salted env password to always need the password
@@ -711,14 +700,7 @@ def changedetection_app(config=None, datastore_o=None):
return redirect(url_for('settings_page'))
if form.validate():
# Don't set password to False when a password is set - should be only removed with the `removepassword` button
app_update = dict(deepcopy(form.data['application']))
# Never update password with '' or False (Added by wtforms when not in submission)
if 'password' in app_update and not app_update['password']:
del (app_update['password'])
datastore.data['settings']['application'].update(app_update)
datastore.data['settings']['application'].update(form.data['application'])
datastore.data['settings']['requests'].update(form.data['requests'])
if not os.getenv("SALTED_PASS", False) and len(form.application.form.password.encrypted_password):
@@ -739,8 +721,7 @@ def changedetection_app(config=None, datastore_o=None):
current_base_url = datastore.data['settings']['application']['base_url'],
hide_remove_pass=os.getenv("SALTED_PASS", False),
api_key=datastore.data['settings']['application'].get('api_access_token'),
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
settings_application=datastore.data['settings']['application'])
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False))
return output
@@ -757,7 +738,7 @@ def changedetection_app(config=None, datastore_o=None):
importer = import_url_list()
importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore)
for uuid in importer.new_uuids:
update_q.put((1, uuid))
update_q.put(uuid)
if len(importer.remaining_data) == 0:
return redirect(url_for('index'))
@@ -770,7 +751,7 @@ def changedetection_app(config=None, datastore_o=None):
d_importer = import_distill_io_json()
d_importer.run(data=request.values.get('distill-io'), flash=flash, datastore=datastore)
for uuid in d_importer.new_uuids:
update_q.put((1, uuid))
update_q.put(uuid)
@@ -819,10 +800,8 @@ def changedetection_app(config=None, datastore_o=None):
newest_file = history[dates[-1]]
# Read as binary and force decode as UTF-8
# Windows may fail decode in python if we just use 'r' mode (chardet decode exception)
try:
with open(newest_file, 'r', encoding='utf-8', errors='ignore') as f:
with open(newest_file, 'r') as f:
newest_version_file_contents = f.read()
except Exception as e:
newest_version_file_contents = "Unable to read {}.\n".format(newest_file)
@@ -835,13 +814,13 @@ def changedetection_app(config=None, datastore_o=None):
previous_file = history[dates[-2]]
try:
with open(previous_file, 'r', encoding='utf-8', errors='ignore') as f:
with open(previous_file, 'r') as f:
previous_version_file_contents = f.read()
except Exception as e:
previous_version_file_contents = "Unable to read {}.\n".format(previous_file)
screenshot_url = watch.get_screenshot()
screenshot_url = datastore.get_screenshot(uuid)
system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'
@@ -853,7 +832,7 @@ def changedetection_app(config=None, datastore_o=None):
newest=newest_version_file_contents,
previous=previous_version_file_contents,
extra_stylesheets=extra_stylesheets,
versions=dates[:-1], # All except current/last
versions=dates[1:],
uuid=uuid,
newest_version_timestamp=dates[-1],
current_previous_version=str(previous_version),
@@ -861,11 +840,7 @@ def changedetection_app(config=None, datastore_o=None):
extra_title=" - Diff - {}".format(watch['title'] if watch['title'] else watch['url']),
left_sticky=True,
screenshot=screenshot_url,
is_html_webdriver=is_html_webdriver,
last_error=watch['last_error'],
last_error_text=watch.get_error_text(),
last_error_screenshot=watch.get_error_snapshot()
)
is_html_webdriver=is_html_webdriver)
return output
@@ -880,82 +855,67 @@ def changedetection_app(config=None, datastore_o=None):
if uuid == 'first':
uuid = list(datastore.data['watching'].keys()).pop()
extra_stylesheets = [url_for('static_content', group='styles', filename='diff.css')]
try:
watch = datastore.data['watching'][uuid]
except KeyError:
flash("No history found for the specified link, bad link?", "error")
return redirect(url_for('index'))
system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'
extra_stylesheets = [url_for('static_content', group='styles', filename='diff.css')]
if watch.history_n >0:
timestamps = sorted(watch.history.keys(), key=lambda x: int(x))
filename = watch.history[timestamps[-1]]
try:
with open(filename, 'r') as f:
tmp = f.readlines()
# Get what needs to be highlighted
ignore_rules = watch.get('ignore_text', []) + datastore.data['settings']['application']['global_ignore_text']
# .readlines will keep the \n, but we will parse it here again, in the future tidy this up
ignored_line_numbers = html_tools.strip_ignore_text(content="".join(tmp),
wordlist=ignore_rules,
mode='line numbers'
)
trigger_line_numbers = html_tools.strip_ignore_text(content="".join(tmp),
wordlist=watch['trigger_text'],
mode='line numbers'
)
# Prepare the classes and lines used in the template
i=0
for l in tmp:
classes=[]
i+=1
if i in ignored_line_numbers:
classes.append('ignored')
if i in trigger_line_numbers:
classes.append('triggered')
content.append({'line': l, 'classes': ' '.join(classes)})
except Exception as e:
content.append({'line': "File doesnt exist or unable to read file {}".format(filename), 'classes': ''})
else:
content.append({'line': "No history found", 'classes': ''})
screenshot_url = datastore.get_screenshot(uuid)
system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'
is_html_webdriver = True if watch.get('fetch_backend') == 'html_webdriver' or (
watch.get('fetch_backend', None) is None and system_uses_webdriver) else False
# Never requested successfully, but we detected a fetch error
if datastore.data['watching'][uuid].history_n == 0 and (watch.get_error_text() or watch.get_error_snapshot()):
flash("Preview unavailable - No fetch/check completed or triggers not reached", "error")
output = render_template("preview.html",
content=content,
history_n=watch.history_n,
extra_stylesheets=extra_stylesheets,
# current_diff_url=watch['url'],
watch=watch,
uuid=uuid,
is_html_webdriver=is_html_webdriver,
last_error=watch['last_error'],
last_error_text=watch.get_error_text(),
last_error_screenshot=watch.get_error_snapshot())
return output
timestamp = list(watch.history.keys())[-1]
filename = watch.history[timestamp]
try:
with open(filename, 'r', encoding='utf-8', errors='ignore') as f:
tmp = f.readlines()
# Get what needs to be highlighted
ignore_rules = watch.get('ignore_text', []) + datastore.data['settings']['application']['global_ignore_text']
# .readlines will keep the \n, but we will parse it here again, in the future tidy this up
ignored_line_numbers = html_tools.strip_ignore_text(content="".join(tmp),
wordlist=ignore_rules,
mode='line numbers'
)
trigger_line_numbers = html_tools.strip_ignore_text(content="".join(tmp),
wordlist=watch['trigger_text'],
mode='line numbers'
)
# Prepare the classes and lines used in the template
i=0
for l in tmp:
classes=[]
i+=1
if i in ignored_line_numbers:
classes.append('ignored')
if i in trigger_line_numbers:
classes.append('triggered')
content.append({'line': l, 'classes': ' '.join(classes)})
except Exception as e:
content.append({'line': "File doesnt exist or unable to read file {}".format(filename), 'classes': ''})
output = render_template("preview.html",
content=content,
history_n=watch.history_n,
extra_stylesheets=extra_stylesheets,
ignored_line_numbers=ignored_line_numbers,
triggered_line_numbers=trigger_line_numbers,
current_diff_url=watch['url'],
screenshot=watch.get_screenshot(),
screenshot=screenshot_url,
watch=watch,
uuid=uuid,
is_html_webdriver=is_html_webdriver,
last_error=watch['last_error'],
last_error_text=watch.get_error_text(),
last_error_screenshot=watch.get_error_snapshot())
is_html_webdriver=is_html_webdriver)
return output
@@ -1055,12 +1015,11 @@ def changedetection_app(config=None, datastore_o=None):
if datastore.data['settings']['application']['password'] and not flask_login.current_user.is_authenticated:
abort(403)
screenshot_filename = "last-screenshot.png" if not request.args.get('error_screenshot') else "last-error-screenshot.png"
# These files should be in our subdirectory
try:
# set nocache, set content-type
response = make_response(send_from_directory(os.path.join(datastore_o.datastore_path, filename), screenshot_filename))
watch_dir = datastore_o.datastore_path + "/" + filename
response = make_response(send_from_directory(filename="last-screenshot.png", directory=watch_dir, path=watch_dir + "/last-screenshot.png"))
response.headers['Content-type'] = 'image/png'
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
response.headers['Pragma'] = 'no-cache'
@@ -1096,9 +1055,9 @@ def changedetection_app(config=None, datastore_o=None):
except FileNotFoundError:
abort(404)
@app.route("/form/add/quickwatch", methods=['POST'])
@app.route("/api/add", methods=['POST'])
@login_required
def form_quick_watch_add():
def form_watch_add():
from changedetectionio import forms
form = forms.quickWatchForm(request.form)
@@ -1111,19 +1070,13 @@ def changedetection_app(config=None, datastore_o=None):
flash('The URL {} already exists'.format(url), "error")
return redirect(url_for('index'))
add_paused = request.form.get('edit_and_watch_submit_button') != None
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip(), extras={'paused': add_paused})
if not add_paused and new_uuid:
# @todo add_watch should throw a custom Exception for validation etc
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip())
if new_uuid:
# Straight into the queue.
update_q.put((1, new_uuid))
update_q.put(new_uuid)
flash("Watch added.")
if add_paused:
flash('Watch added in Paused state, saving will unpause.')
return redirect(url_for('edit_page', uuid=new_uuid, unpause_on_save=1))
return redirect(url_for('index'))
@@ -1154,7 +1107,7 @@ def changedetection_app(config=None, datastore_o=None):
uuid = list(datastore.data['watching'].keys()).pop()
new_uuid = datastore.clone(uuid)
update_q.put((5, new_uuid))
update_q.put(new_uuid)
flash('Cloned.')
return redirect(url_for('index'))
@@ -1175,7 +1128,7 @@ def changedetection_app(config=None, datastore_o=None):
if uuid:
if uuid not in running_uuids:
update_q.put((1, uuid))
update_q.put(uuid)
i = 1
elif tag != None:
@@ -1183,7 +1136,7 @@ def changedetection_app(config=None, datastore_o=None):
for watch_uuid, watch in datastore.data['watching'].items():
if (tag != None and tag in watch['tag']):
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
update_q.put((1, watch_uuid))
update_q.put(watch_uuid)
i += 1
else:
@@ -1191,68 +1144,11 @@ def changedetection_app(config=None, datastore_o=None):
for watch_uuid, watch in datastore.data['watching'].items():
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
update_q.put((1, watch_uuid))
update_q.put(watch_uuid)
i += 1
flash("{} watches are queued for rechecking.".format(i))
return redirect(url_for('index', tag=tag))
@app.route("/form/checkbox-operations", methods=['POST'])
@login_required
def form_watch_list_checkbox_operations():
op = request.form['op']
uuids = request.form.getlist('uuids')
if (op == 'delete'):
for uuid in uuids:
uuid = uuid.strip()
if datastore.data['watching'].get(uuid):
datastore.delete(uuid.strip())
flash("{} watches deleted".format(len(uuids)))
elif (op == 'pause'):
for uuid in uuids:
uuid = uuid.strip()
if datastore.data['watching'].get(uuid):
datastore.data['watching'][uuid.strip()]['paused'] = True
flash("{} watches paused".format(len(uuids)))
elif (op == 'unpause'):
for uuid in uuids:
uuid = uuid.strip()
if datastore.data['watching'].get(uuid):
datastore.data['watching'][uuid.strip()]['paused'] = False
flash("{} watches unpaused".format(len(uuids)))
elif (op == 'mute'):
for uuid in uuids:
uuid = uuid.strip()
if datastore.data['watching'].get(uuid):
datastore.data['watching'][uuid.strip()]['notification_muted'] = True
flash("{} watches muted".format(len(uuids)))
elif (op == 'unmute'):
for uuid in uuids:
uuid = uuid.strip()
if datastore.data['watching'].get(uuid):
datastore.data['watching'][uuid.strip()]['notification_muted'] = False
flash("{} watches un-muted".format(len(uuids)))
elif (op == 'notification-default'):
from changedetectionio.notification import (
default_notification_format_for_watch
)
for uuid in uuids:
uuid = uuid.strip()
if datastore.data['watching'].get(uuid):
datastore.data['watching'][uuid.strip()]['notification_title'] = None
datastore.data['watching'][uuid.strip()]['notification_body'] = None
datastore.data['watching'][uuid.strip()]['notification_urls'] = []
datastore.data['watching'][uuid.strip()]['notification_format'] = default_notification_format_for_watch
flash("{} watches set to use default notification settings".format(len(uuids)))
return redirect(url_for('index'))
@app.route("/api/share-url", methods=['GET'])
@login_required
def form_share_put_watch():
@@ -1350,6 +1246,7 @@ def notification_runner():
global notification_debug_log
from datetime import datetime
import json
while not app.config.exit.is_set():
try:
# At the moment only one thread runs (single runner)
@@ -1360,12 +1257,11 @@ def notification_runner():
else:
now = datetime.now()
sent_obj = None
try:
from changedetectionio import notification
sent_obj = notification.process_notification(n_object, datastore)
notification.process_notification(n_object, datastore)
except Exception as e:
logging.error("Watch URL: {} Error {}".format(n_object['watch_url'], str(e)))
@@ -1379,19 +1275,14 @@ def notification_runner():
notification_debug_log += log_lines
# Process notifications
notification_debug_log+= ["{} - SENDING - {}".format(now.strftime("%Y/%m/%d %H:%M:%S,000"), json.dumps(sent_obj))]
notification_debug_log+= ["{} - SENDING {}".format(now.strftime("%Y/%m/%d %H:%M:%S,000"), json.dumps(n_object))]
# Trim the log length
notification_debug_log = notification_debug_log[-100:]
# Thread runner to check every minute, look for new watches to feed into the Queue.
def ticker_thread_check_time_launch_checks():
import random
from changedetectionio import update_worker
proxy_last_called_time = {}
recheck_time_minimum_seconds = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 20))
print("System env MINIMUM_SECONDS_RECHECK_TIME", recheck_time_minimum_seconds)
import logging
# Spin up Workers that do the fetching
# Can be overriden by ENV or use the default settings
@@ -1424,12 +1315,14 @@ def ticker_thread_check_time_launch_checks():
while update_q.qsize() >= 2000:
time.sleep(1)
recheck_time_system_seconds = int(datastore.threshold_seconds)
# Check for watches outside of the time threshold to put in the thread queue.
now = time.time()
recheck_time_minimum_seconds = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
recheck_time_system_seconds = datastore.threshold_seconds
for uuid in watch_uuid_list:
now = time.time()
watch = datastore.data['watching'].get(uuid)
if not watch:
logging.error("Watch: {} no longer present.".format(uuid))
@@ -1440,58 +1333,20 @@ def ticker_thread_check_time_launch_checks():
continue
# If they supplied an individual entry minutes to threshold.
threshold = now
watch_threshold_seconds = watch.threshold_seconds()
threshold = watch_threshold_seconds if watch_threshold_seconds > 0 else recheck_time_system_seconds
if watch_threshold_seconds:
threshold -= watch_threshold_seconds
else:
threshold -= recheck_time_system_seconds
# #580 - Jitter plus/minus amount of time to make the check seem more random to the server
jitter = datastore.data['settings']['requests'].get('jitter_seconds', 0)
if jitter > 0:
if watch.jitter_seconds == 0:
watch.jitter_seconds = random.uniform(-abs(jitter), jitter)
# Yeah, put it in the queue, it's more than time
if watch['last_checked'] <= max(threshold, recheck_time_minimum_seconds):
if not uuid in running_uuids and uuid not in update_q.queue:
update_q.put(uuid)
seconds_since_last_recheck = now - watch['last_checked']
if seconds_since_last_recheck >= (threshold + watch.jitter_seconds) and seconds_since_last_recheck >= recheck_time_minimum_seconds:
if not uuid in running_uuids and uuid not in [q_uuid for p,q_uuid in update_q.queue]:
# Proxies can be set to have a limit on seconds between which they can be called
watch_proxy = datastore.get_preferred_proxy_for_watch(uuid=uuid)
if watch_proxy and watch_proxy in list(datastore.proxy_list.keys()):
# Proxy may also have some threshold minimum
proxy_list_reuse_time_minimum = int(datastore.proxy_list.get(watch_proxy, {}).get('reuse_time_minimum', 0))
if proxy_list_reuse_time_minimum:
proxy_last_used_time = proxy_last_called_time.get(watch_proxy, 0)
time_since_proxy_used = int(time.time() - proxy_last_used_time)
if time_since_proxy_used < proxy_list_reuse_time_minimum:
# Not enough time difference reached, skip this watch
print("> Skipped UUID {} using proxy '{}', not enough time between proxy requests {}s/{}s".format(uuid,
watch_proxy,
time_since_proxy_used,
proxy_list_reuse_time_minimum))
continue
else:
# Record the last used time
proxy_last_called_time[watch_proxy] = int(time.time())
# Use Epoch time as priority, so we get a "sorted" PriorityQueue, but we can still push a priority 1 into it.
priority = int(time.time())
print(
"> Queued watch UUID {} last checked at {} queued at {:0.2f} priority {} jitter {:0.2f}s, {:0.2f}s since last checked".format(
uuid,
watch['last_checked'],
now,
priority,
watch.jitter_seconds,
now - watch['last_checked']))
# Into the queue with you
update_q.put((priority, uuid))
# Reset for next time
watch.jitter_seconds = 0
# Wait before checking the list again - saves CPU
time.sleep(1)
# Wait a few seconds before checking the list again
time.sleep(3)
# Should be low so we can break this out in testing
app.config.exit.wait(1)
app.config.exit.wait(1)
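The hunks above replace the plain `queue.Queue` with a `queue.PriorityQueue` and queue `(priority, uuid)` tuples, using the epoch time as the priority so scheduled rechecks stay roughly ordered while an explicit recheck pushed with priority `1` jumps the line. A minimal sketch of that pattern (the UUID strings are placeholders):
```python
# Minimal sketch of the (priority, uuid) queueing pattern introduced above.
# Epoch-time priorities keep scheduled work roughly ordered; a literal 1 jumps the queue.
import queue
import time

update_q = queue.PriorityQueue()

# Ticker thread: scheduled rechecks use "now" as their priority
update_q.put((int(time.time()), "uuid-scheduled-a"))
update_q.put((int(time.time()) + 1, "uuid-scheduled-b"))

# UI / API: an explicit "recheck now" request gets top priority
update_q.put((1, "uuid-user-requested"))

while not update_q.empty():
    priority, uuid = update_q.get()
    print(priority, uuid)
# uuid-user-requested comes out first, then the scheduled items in time order
```
This is also why the membership test in the ticker now unpacks the tuples, `[q_uuid for p, q_uuid in update_q.queue]`, rather than looking for the bare UUID.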

View File

@@ -24,7 +24,7 @@ class Watch(Resource):
abort(404, message='No watch exists with the UUID of {}'.format(uuid))
if request.args.get('recheck'):
self.update_q.put((1, uuid))
self.update_q.put(uuid)
return "OK", 200
# Return without history, get that via another API call
@@ -100,7 +100,7 @@ class CreateWatch(Resource):
extras = {'title': json_data['title'].strip()} if json_data.get('title') else {}
new_uuid = self.datastore.add_watch(url=json_data['url'].strip(), tag=tag, extras=extras)
self.update_q.put((1, new_uuid))
self.update_q.put(new_uuid)
return {'uuid': new_uuid}, 201
# Return concise list of available watches and some very basic info
@@ -113,42 +113,12 @@ class CreateWatch(Resource):
list[k] = {'url': v['url'],
'title': v['title'],
'last_checked': v['last_checked'],
'last_changed': v.last_changed,
'last_changed': v['last_changed'],
'last_error': v['last_error']}
if request.args.get('recheck_all'):
for uuid in self.datastore.data['watching'].keys():
self.update_q.put((1, uuid))
self.update_q.put(uuid)
return {'status': "OK"}, 200
return list, 200
class SystemInfo(Resource):
def __init__(self, **kwargs):
# datastore is a black box dependency
self.datastore = kwargs['datastore']
self.update_q = kwargs['update_q']
@auth.check_token
def get(self):
import time
overdue_watches = []
# Check all watches and report which have not been checked but should have been
for uuid, watch in self.datastore.data.get('watching', {}).items():
# see if now - last_checked is greater than the time that should have been
# this is not super accurate (maybe they just edited it) but better than nothing
t = watch.threshold_seconds()
if not t:
t = self.datastore.threshold_seconds
time_since_check = time.time() - watch.get('last_checked')
if time_since_check > t:
overdue_watches.append(uuid)
return {
'queue_size': self.update_q.qsize(),
'overdue_watches': overdue_watches,
'uptime': round(time.time() - self.datastore.start_time, 2),
'watch_count': len(self.datastore.data.get('watching', {}))
}, 200
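To exercise the new endpoint, a hypothetical client call might look like the following; the host, port, and the `x-api-key` header name are assumptions not taken from this diff, so check the project's API documentation. The response keys come from the hunk above:
```python
# Hypothetical call to the new /api/v1/systeminfo endpoint.
# Base URL and auth header name are assumptions; response keys come from the diff above.
import requests

resp = requests.get(
    "http://127.0.0.1:5000/api/v1/systeminfo",
    headers={"x-api-key": "YOUR_API_KEY"},  # assumed header name, see the API docs
    timeout=10,
)
info = resp.json()
print("Queue size:", info["queue_size"])
print("Watch count:", info["watch_count"])
print("Uptime (s):", info["uptime"])
print("Overdue watches:", info["overdue_watches"])
```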

View File

@@ -1,11 +0,0 @@
import apprise
# Create our AppriseAsset and populate it with some of our new values:
# https://github.com/caronc/apprise/wiki/Development_API#the-apprise-asset-object
asset = apprise.AppriseAsset(
image_url_logo='https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png'
)
asset.app_id = "changedetection.io"
asset.app_desc = "ChangeDetection.io best and simplest website monitoring and change detection"
asset.app_url = "https://changedetection.io"

View File

@@ -4,7 +4,6 @@
import getopt
import os
import signal
import sys
import eventlet
@@ -12,21 +11,7 @@ import eventlet.wsgi
from . import store, changedetection_app, content_fetcher
from . import __version__
# Only global so we can access it in the signal handler
datastore = None
app = None
def sigterm_handler(_signo, _stack_frame):
global app
global datastore
# app.config.exit.set()
print('Shutdown: Got SIGTERM, DB saved to disk')
datastore.sync_to_json()
# raise SystemExit
def main():
global datastore
global app
ssl_mode = False
host = ''
port = os.environ.get('PORT') or 5000
@@ -50,6 +35,11 @@ def main():
create_datastore_dir = False
for opt, arg in opts:
# if opt == '--purge':
# Remove history, the actual files you need to delete manually.
# for uuid, watch in datastore.data['watching'].items():
# watch.update({'history': {}, 'last_checked': 0, 'last_changed': 0, 'previous_md5': None})
if opt == '-s':
ssl_mode = True
@@ -82,12 +72,9 @@ def main():
"Or use the -C parameter to create the directory.".format(app_config['datastore_path']), file=sys.stderr)
sys.exit(2)
datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=__version__)
app = changedetection_app(app_config, datastore)
signal.signal(signal.SIGTERM, sigterm_handler)
# Go into cleanup mode
if do_cleanup:
datastore.remove_unused_snapshots()
@@ -102,14 +89,6 @@ def main():
has_password=datastore.data['settings']['application']['password'] != False
)
# Monitored websites will not receive a Referer header
# when a user clicks on an outgoing link.
@app.after_request
def hide_referrer(response):
if os.getenv("HIDE_REFERER", False):
response.headers["Referrer-Policy"] = "no-referrer"
return response
# Proxy sub-directory support
# Set environment var USE_X_SETTINGS=1 on this script
# And then in your proxy_pass settings
@@ -132,3 +111,4 @@ def main():
else:
eventlet.wsgi.server(eventlet.listen((host, int(port))), app)

View File

@@ -6,64 +6,38 @@ import requests
import time
import sys
class Non200ErrorCodeReceived(Exception):
def __init__(self, status_code, url, screenshot=None, xpath_data=None, page_html=None):
# Set this so we can use it in other parts of the app
self.status_code = status_code
self.url = url
self.screenshot = screenshot
self.xpath_data = xpath_data
self.page_text = None
if page_html:
from changedetectionio import html_tools
self.page_text = html_tools.html_to_text(page_html)
return
class JSActionExceptions(Exception):
def __init__(self, status_code, url, screenshot, message=''):
self.status_code = status_code
self.url = url
self.screenshot = screenshot
self.message = message
return
class PageUnloadable(Exception):
def __init__(self, status_code, url, screenshot=False, message=False):
def __init__(self, status_code, url):
# Set this so we can use it in other parts of the app
self.status_code = status_code
self.url = url
self.screenshot = screenshot
self.message = message
return
pass
class EmptyReply(Exception):
def __init__(self, status_code, url, screenshot=None):
def __init__(self, status_code, url):
# Set this so we can use it in other parts of the app
self.status_code = status_code
self.url = url
self.screenshot = screenshot
return
pass
class ScreenshotUnavailable(Exception):
def __init__(self, status_code, url, page_html=None):
def __init__(self, status_code, url):
# Set this so we can use it in other parts of the app
self.status_code = status_code
self.url = url
if page_html:
from html_tools import html_to_text
self.page_text = html_to_text(page_html)
return
pass
class ReplyWithContentButNoText(Exception):
def __init__(self, status_code, url, screenshot=None):
def __init__(self, status_code, url):
# Set this so we can use it in other parts of the app
self.status_code = status_code
self.url = url
self.screenshot = screenshot
return
pass
class Fetcher():
error = None
@@ -72,7 +46,6 @@ class Fetcher():
headers = None
fetcher_description = "No description"
webdriver_js_execute_code = None
xpath_element_js = """
// Include the getXpath script directly, easier than fetching
!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?module.exports=n():"function"==typeof define&&define.amd?define(n):(e=e||self).getXPath=n()}(this,function(){return function(e){var n=e;if(n&&n.id)return'//*[@id="'+n.id+'"]';for(var o=[];n&&Node.ELEMENT_NODE===n.nodeType;){for(var i=0,r=!1,d=n.previousSibling;d;)d.nodeType!==Node.DOCUMENT_TYPE_NODE&&d.nodeName===n.nodeName&&i++,d=d.previousSibling;for(d=n.nextSibling;d;){if(d.nodeName===n.nodeName){r=!0;break}d=d.nextSibling}o.push((n.prefix?n.prefix+":":"")+n.localName+(i||r?"["+(i+1)+"]":"")),n=n.parentNode}return o.length?"/"+o.reverse().join("/"):""}});
@@ -89,12 +62,12 @@ class Fetcher():
break;
}
if('' !==r.id) {
chained_css.unshift("#"+CSS.escape(r.id));
final_selector= chained_css.join(' > ');
chained_css.unshift("#"+r.id);
final_selector= chained_css.join('>');
// Be sure there's only one, some sites have multiples of the same ID tag :-(
if (window.document.querySelectorAll(final_selector).length ==1 ) {
return final_selector;
}
}
return null;
} else {
chained_css.unshift(r.tagName.toLowerCase());
@@ -202,11 +175,12 @@ class Fetcher():
# Will be needed in the future by the VisualSelector, always get this where possible.
screenshot = False
fetcher_description = "No description"
system_http_proxy = os.getenv('HTTP_PROXY')
system_https_proxy = os.getenv('HTTPS_PROXY')
# Time ONTOP of the system defined env minimum time
render_extract_delay = 0
render_extract_delay=0
@abstractmethod
def get_error(self):
@@ -293,15 +267,7 @@ class base_html_playwright(Fetcher):
# allow per-watch proxy selection override
if proxy_override:
# https://playwright.dev/docs/network#http-proxy
from urllib.parse import urlparse
parsed = urlparse(proxy_override)
proxy_url = "{}://{}:{}".format(parsed.scheme, parsed.hostname, parsed.port)
self.proxy = {'server': proxy_url}
if parsed.username:
self.proxy['username'] = parsed.username
if parsed.password:
self.proxy['password'] = parsed.password
self.proxy = {'server': proxy_override}
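A minimal sketch (not part of the diff) of what the urlparse-based branch above produces for a proxy URL that carries credentials; the proxy URL itself is hypothetical.
from urllib.parse import urlparse
proxy_override = "http://user:pass@proxy.example.com:3128"   # hypothetical per-watch proxy
parsed = urlparse(proxy_override)
proxy = {'server': "{}://{}:{}".format(parsed.scheme, parsed.hostname, parsed.port)}
if parsed.username:
    proxy['username'] = parsed.username
if parsed.password:
    proxy['password'] = parsed.password
print(proxy)   # {'server': 'http://proxy.example.com:3128', 'username': 'user', 'password': 'pass'}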
def run(self,
url,
@@ -315,15 +281,13 @@ class base_html_playwright(Fetcher):
from playwright.sync_api import sync_playwright
import playwright._impl._api_types
from playwright._impl._api_types import Error, TimeoutError
response = None
with sync_playwright() as p:
browser_type = getattr(p, self.browser_type)
# Seemed to cause a connection Exception even though I can see it connect
# self.browser = browser_type.connect(self.command_executor, timeout=timeout*1000)
# 60,000 connection timeout only
browser = browser_type.connect_over_cdp(self.command_executor, timeout=60000)
browser = browser_type.connect_over_cdp(self.command_executor, timeout=timeout * 1000)
# Set user agent to prevent Cloudflare from blocking the browser
# Use the default one configured in the App.py model that's passed from fetch_site_status.py
@@ -336,81 +300,40 @@ class base_html_playwright(Fetcher):
accept_downloads=False
)
if len(request_headers):
context.set_extra_http_headers(request_headers)
page = context.new_page()
try:
page.set_default_navigation_timeout(90000)
page.set_default_timeout(90000)
# Listen for all console events and handle errors
page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))
# Bug - never set viewport size BEFORE page.goto
# Waits for the next navigation. Using Python context manager
# prevents a race condition between clicking and waiting for a navigation.
with page.expect_navigation():
response = page.goto(url, wait_until='load')
# Bug - never set viewport size BEFORE page.goto
response = page.goto(url, timeout=timeout * 1000, wait_until='commit')
# Wait_until = commit
# - `'commit'` - consider operation to be finished when network response is received and the document started loading.
# Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
# This seemed to solve nearly all 'TimeoutErrors'
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
page.wait_for_timeout(extra_wait * 1000)
except playwright._impl._api_types.TimeoutError as e:
context.close()
browser.close()
# This can be ok, we will try to grab what we could retrieve
pass
raise EmptyReply(url=url, status_code=None)
except Exception as e:
print("other exception when page.goto")
print(str(e))
context.close()
browser.close()
raise PageUnloadable(url=url, status_code=None, message=e.message)
raise PageUnloadable(url=url, status_code=None)
if response is None:
context.close()
browser.close()
print("response object was none")
raise EmptyReply(url=url, status_code=None)
if len(page.content().strip()) == 0:
context.close()
browser.close()
raise EmptyReply(url=url, status_code=None)
# Removed browser-set-size, seemed to be needed to make screenshots work reliably in older playwright versions
# Was causing exceptions like 'waiting for page but content is changing' etc
# https://www.browserstack.com/docs/automate/playwright/change-browser-window-size 1280x720 should be the default
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
time.sleep(extra_wait)
# Bug 2(?) Set the viewport size AFTER loading the page
page.set_viewport_size({"width": 1280, "height": 1024})
if self.webdriver_js_execute_code is not None:
try:
page.evaluate(self.webdriver_js_execute_code)
except Exception as e:
# Is it possible to get a screenshot?
error_screenshot = False
try:
page.screenshot(type='jpeg',
clip={'x': 1.0, 'y': 1.0, 'width': 1280, 'height': 1024},
quality=1)
# The actual screenshot
error_screenshot = page.screenshot(type='jpeg',
full_page=True,
quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)))
except Exception as s:
pass
raise JSActionExceptions(status_code=response.status, screenshot=error_screenshot, message=str(e), url=url)
else:
# JS eval was run, now we also wait some time if possible to let the page settle
if self.render_extract_delay:
page.wait_for_timeout(self.render_extract_delay * 1000)
page.wait_for_timeout(500)
self.content = page.content()
self.status_code = response.status
self.content = page.content()
self.headers = response.all_headers()
if current_css_filter is not None:
@@ -423,31 +346,17 @@ class base_html_playwright(Fetcher):
# Bug 3 in Playwright screenshot handling
# Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it
# JPEG is better here because the screenshots can be very very large
# Screenshots also travel via the ws:// (websocket) meaning that the binary data is base64 encoded
# which will significantly increase the IO size between the server and client, it's recommended to use the lowest
# acceptable screenshot quality here
try:
# Quality set to 1 because it's not used, just used as a work-around for a bug, no need to change this.
page.screenshot(type='jpeg', clip={'x': 1.0, 'y': 1.0, 'width': 1280, 'height': 1024}, quality=1)
# The actual screenshot
self.screenshot = page.screenshot(type='jpeg', full_page=True, quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)))
page.screenshot(type='jpeg', clip={'x': 1.0, 'y': 1.0, 'width': 1280, 'height': 1024})
self.screenshot = page.screenshot(type='jpeg', full_page=True, quality=92)
except Exception as e:
context.close()
browser.close()
raise ScreenshotUnavailable(url=url, status_code=None)
if len(self.content.strip()) == 0:
context.close()
browser.close()
print("Content was empty")
raise EmptyReply(url=url, status_code=None, screenshot=self.screenshot)
context.close()
browser.close()
if not ignore_status_codes and self.status_code!=200:
raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, page_html=self.content, screenshot=self.screenshot)
class base_html_webdriver(Fetcher):
if os.getenv("WEBDRIVER_URL"):
@@ -519,11 +428,7 @@ class base_html_webdriver(Fetcher):
self.driver.set_window_size(1280, 1024)
self.driver.implicitly_wait(int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)))
if self.webdriver_js_execute_code is not None:
self.driver.execute_script(self.webdriver_js_execute_code)
# Selenium doesn't automatically wait for actions as well as Playwright does, so wait again
self.driver.implicitly_wait(int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)))
self.screenshot = self.driver.get_screenshot_as_png()
# @todo - how to check this? is it possible?
self.status_code = 200
@@ -535,8 +440,6 @@ class base_html_webdriver(Fetcher):
self.content = self.driver.page_source
self.headers = {}
self.screenshot = self.driver.get_screenshot_as_png()
# Does the connection to the webdriver work? run a test connection.
def is_ready(self):
from selenium import webdriver
@@ -575,12 +478,7 @@ class html_requests(Fetcher):
ignore_status_codes=False,
current_css_filter=None):
# Make requests use a more modern looking user-agent
if not 'User-Agent' in request_headers:
request_headers['User-Agent'] = os.getenv("DEFAULT_SETTINGS_HEADERS_USERAGENT",
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36')
proxies = {}
proxies={}
# Allows override the proxy on a per-request basis
if self.proxy_override:
@@ -608,14 +506,10 @@ class html_requests(Fetcher):
if encoding:
r.encoding = encoding
if not r.content or not len(r.content):
raise EmptyReply(url=url, status_code=r.status_code)
# @todo test this
# @todo maybe you really want to test zero-byte return pages?
if r.status_code != 200 and not ignore_status_codes:
# maybe check with content works?
raise Non200ErrorCodeReceived(url=url, status_code=r.status_code, page_html=r.text)
if (not ignore_status_codes and not r) or not r.content or not len(r.content):
raise EmptyReply(url=url, status_code=r.status_code)
self.status_code = r.status_code
self.content = r.text

View File

@@ -1,5 +1,4 @@
import hashlib
import logging
import os
import re
import time
@@ -11,37 +10,48 @@ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Some common stuff here that can be moved to a base class
# (set_proxy_from_list)
class perform_site_check():
screenshot = None
xpath_data = None
def __init__(self, *args, datastore, **kwargs):
super().__init__(*args, **kwargs)
self.datastore = datastore
# Doesn't look like python supports forward slash auto enclosure in re.findall
# So convert it to inline flag "foobar(?i)" type configuration
def forward_slash_enclosed_regex_to_options(self, regex):
res = re.search(r'^/(.*?)/(\w+)$', regex, re.IGNORECASE)
# If there was a proxy list enabled, figure out what proxy_args/which proxy to use
# if watch.proxy use that
# fetcher.proxy_override = watch.proxy or main config proxy
# Allows override the proxy on a per-request basis
# ALWAYS use the first one is nothing selected
if res:
regex = res.group(1)
regex += '(?{})'.format(res.group(2))
def set_proxy_from_list(self, watch):
proxy_args = None
if self.datastore.proxy_list is None:
return None
# If it's a valid one
if any([watch['proxy'] in p for p in self.datastore.proxy_list]):
proxy_args = watch['proxy']
# not valid (including None), try the system one
else:
regex += '(?{})'.format('i')
system_proxy = self.datastore.data['settings']['requests']['proxy']
# Is not None and exists
if any([system_proxy in p for p in self.datastore.proxy_list]):
proxy_args = system_proxy
return regex
# Fallback - Did not resolve anything, use the first available
if proxy_args is None:
proxy_args = self.datastore.proxy_list[0][0]
return proxy_args
def run(self, uuid):
timestamp = int(time.time()) # used for storage etc too
changed_detected = False
screenshot = False # as bytes
stripped_text_from_html = ""
watch = self.datastore.data['watching'].get(uuid)
if not watch:
return
watch = self.datastore.data['watching'][uuid]
# Protect against file:// access
if re.search(r'^file', watch['url'], re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False):
@@ -52,7 +62,7 @@ class perform_site_check():
# Unset any existing notification error
update_obj = {'last_notification_error': False, 'last_error': False}
extra_headers =self.datastore.data['watching'][uuid].get('headers')
extra_headers = self.datastore.get_val(uuid, 'headers')
# Tweak the base config with the per-watch ones
request_headers = self.datastore.data['settings']['headers'].copy()
@@ -64,11 +74,11 @@ class perform_site_check():
if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')
timeout = self.datastore.data['settings']['requests'].get('timeout')
url = watch.get('url')
request_body = self.datastore.data['watching'][uuid].get('body')
request_method = self.datastore.data['watching'][uuid].get('method')
ignore_status_codes = self.datastore.data['watching'][uuid].get('ignore_status_codes', False)
timeout = self.datastore.data['settings']['requests']['timeout']
url = self.datastore.get_val(uuid, 'url')
request_body = self.datastore.get_val(uuid, 'body')
request_method = self.datastore.get_val(uuid, 'method')
ignore_status_code = self.datastore.get_val(uuid, 'ignore_status_codes')
# source: support
is_source = False
@@ -84,13 +94,9 @@ class perform_site_check():
# If the klass doesn't exist, just use a default
klass = getattr(content_fetcher, "html_requests")
proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
proxy_url = None
if proxy_id:
proxy_url = self.datastore.proxy_list.get(proxy_id).get('url')
print ("UUID {} Using proxy {}".format(uuid, proxy_url))
fetcher = klass(proxy_override=proxy_url)
proxy_args = self.set_proxy_from_list(watch)
fetcher = klass(proxy_override=proxy_args)
# Configurable per-watch or global extra delay before extracting text (for webDriver types)
system_webdriver_delay = self.datastore.data['settings']['application'].get('webdriver_delay', None)
@@ -99,15 +105,9 @@ class perform_site_check():
elif system_webdriver_delay is not None:
fetcher.render_extract_delay = system_webdriver_delay
if watch['webdriver_js_execute_code'] is not None and watch['webdriver_js_execute_code'].strip():
fetcher.webdriver_js_execute_code = watch['webdriver_js_execute_code']
fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch['css_filter'])
fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_code, watch['css_filter'])
fetcher.quit()
self.screenshot = fetcher.screenshot
self.xpath_data = fetcher.xpath_data
# Fetching complete, now filters
# @todo move to class / maybe inside of fetcher abstract base?
@@ -141,15 +141,12 @@ class perform_site_check():
has_filter_rule = True
if has_filter_rule:
json_filter_prefixes = ['json:', 'jq:']
if any(prefix in css_filter_rule for prefix in json_filter_prefixes):
stripped_text_from_html = html_tools.extract_json_as_string(content=fetcher.content, json_filter=css_filter_rule)
if 'json:' in css_filter_rule:
stripped_text_from_html = html_tools.extract_json_as_string(content=fetcher.content, jsonpath_filter=css_filter_rule)
is_html = False
if is_html or is_source:
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
fetcher.content = html_tools.workarounds_for_obfuscations(fetcher.content)
html_content = fetcher.content
# If not JSON, and if it's not text/plain..
@@ -192,7 +189,7 @@ class perform_site_check():
# Treat pages with no renderable text content as a change? No by default
empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)
if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0:
raise content_fetcher.ReplyWithContentButNoText(url=url, status_code=fetcher.get_last_status_code(), screenshot=screenshot)
raise content_fetcher.ReplyWithContentButNoText(url=url, status_code=200)
# We rely on the actual text in the html output.. many sites have random script vars etc,
# in the future we'll implement other mechanisms.
@@ -212,64 +209,44 @@ class perform_site_check():
if len(extract_text) > 0:
regex_matched_output = []
for s_re in extract_text:
# incase they specified something in '/.../x'
regex = self.forward_slash_enclosed_regex_to_options(s_re)
result = re.findall(regex.encode('utf-8'), stripped_text_from_html)
result = re.findall(s_re.encode('utf8'), stripped_text_from_html,
flags=re.MULTILINE | re.DOTALL | re.LOCALE)
if result:
regex_matched_output.append(result[0])
for l in result:
if type(l) is tuple:
#@todo - some formatter option default (between groups)
regex_matched_output += list(l) + [b'\n']
else:
# @todo - some formatter option default (between each ungrouped result)
regex_matched_output += [l] + [b'\n']
# Now we will only show what the regex matched
stripped_text_from_html = b''
text_content_before_ignored_filter = b''
if regex_matched_output:
# @todo some formatter for presentation?
stripped_text_from_html = b''.join(regex_matched_output)
stripped_text_from_html = b'\n'.join(regex_matched_output)
text_content_before_ignored_filter = stripped_text_from_html
# Re #133 - if we should strip whitespaces from triggering the change detected comparison
if self.datastore.data['settings']['application'].get('ignore_whitespace', False):
fetched_md5 = hashlib.md5(stripped_text_from_html.translate(None, b'\r\n\t ')).hexdigest()
else:
fetched_md5 = hashlib.md5(stripped_text_from_html).hexdigest()
############ Blocking rules, after checksum #################
blocked = False
# On the first run of a site, watch['previous_md5'] will be None, set it the current one.
if not watch.get('previous_md5'):
watch['previous_md5'] = fetched_md5
update_obj["previous_md5"] = fetched_md5
blocked_by_not_found_trigger_text = False
if len(watch['trigger_text']):
# Assume blocked
blocked = True
# Yeah, lets block first until something matches
blocked_by_not_found_trigger_text = True
# Filter and trigger works the same, so reuse it
# It should return the line numbers that match
result = html_tools.strip_ignore_text(content=str(stripped_text_from_html),
wordlist=watch['trigger_text'],
mode="line numbers")
# Unblock if the trigger was found
# If it returned any lines that matched..
if result:
blocked = False
blocked_by_not_found_trigger_text = False
if len(watch['text_should_not_be_present']):
# If anything matched, then we should block a change from happening
result = html_tools.strip_ignore_text(content=str(stripped_text_from_html),
wordlist=watch['text_should_not_be_present'],
mode="line numbers")
if result:
blocked = True
# The main thing that all this at the moment comes down to :)
if watch['previous_md5'] != fetched_md5:
if not blocked_by_not_found_trigger_text and watch['previous_md5'] != fetched_md5:
changed_detected = True
# Looks like something changed, but did it match all the rules?
if blocked:
changed_detected = False
update_obj["previous_md5"] = fetched_md5
update_obj["last_changed"] = timestamp
# Extract title as title
if is_html:
@@ -277,21 +254,4 @@ class perform_site_check():
if not watch['title'] or not len(watch['title']):
update_obj['title'] = html_tools.extract_element(find='title', html_content=fetcher.content)
if changed_detected:
if watch.get('check_unique_lines', False):
has_unique_lines = watch.lines_contain_something_unique_compared_to_history(lines=stripped_text_from_html.splitlines())
# One or more lines? unsure?
if not has_unique_lines:
logging.debug("check_unique_lines: UUID {} didnt have anything new setting change_detected=False".format(uuid))
changed_detected = False
else:
logging.debug("check_unique_lines: UUID {} had unique content".format(uuid))
# Always record the new checksum
update_obj["previous_md5"] = fetched_md5
# On the first run of a site, watch['previous_md5'] will be None, set it the current one.
if not watch.get('previous_md5'):
watch['previous_md5'] = fetched_md5
return changed_detected, update_obj, text_content_before_ignored_filter
return changed_detected, update_obj, text_content_before_ignored_filter, fetcher.screenshot, fetcher.xpath_data
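A minimal, self-contained sketch (not part of the diff) of the whitespace-insensitive checksum comparison used above; the sample text and settings are hypothetical.
import hashlib
stripped_text_from_html = b"Price: 100 EUR \r\n"
ignore_whitespace = True
if ignore_whitespace:
    fetched_md5 = hashlib.md5(stripped_text_from_html.translate(None, b'\r\n\t ')).hexdigest()
else:
    fetched_md5 = hashlib.md5(stripped_text_from_html).hexdigest()
previous_md5 = None      # first run: no previous checksum yet, so it gets recorded rather than compared
changed_detected = previous_md5 is not None and previous_md5 != fetched_md5
print(fetched_md5, changed_detected)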

View File

@@ -303,44 +303,22 @@ class ValidateCSSJSONXPATHInput(object):
# Re #265 - maybe in the future fetch the page and offer a
# warning/notice that its possible the rule doesnt yet match anything?
if not self.allow_json:
raise ValidationError("jq not permitted in this field!")
if 'jq:' in line:
try:
import jq
except ModuleNotFoundError:
# `jq` requires full compilation on Windows and so isn't generally available
raise ValidationError("jq support not found")
input = line.replace('jq:', '')
try:
jq.compile(input)
except (ValueError) as e:
message = field.gettext('\'%s\' is not a valid jq expression. (%s)')
raise ValidationError(message % (input, str(e)))
except:
raise ValidationError("A system-error occurred when validating your jq expression")
class quickWatchForm(Form):
url = fields.URLField('URL', validators=[validateURL()])
tag = StringField('Group tag', [validators.Optional()])
watch_submit_button = SubmitField('Watch', render_kw={"class": "pure-button pure-button-primary"})
edit_and_watch_submit_button = SubmitField('Edit > Watch', render_kw={"class": "pure-button pure-button-primary"})
# Common to a single watch and the global settings
class commonSettingsForm(Form):
notification_urls = StringListField('Notification URL list', validators=[validators.Optional(), ValidateAppRiseServers()])
notification_title = StringField('Notification title', validators=[validators.Optional(), ValidateTokensList()])
notification_body = TextAreaField('Notification body', validators=[validators.Optional(), ValidateTokensList()])
notification_format = SelectField('Notification format', choices=valid_notification_formats.keys())
notification_urls = StringListField('Notification URL list', validators=[validators.Optional(), ValidateNotificationBodyAndTitleWhenURLisSet(), ValidateAppRiseServers()])
notification_title = StringField('Notification title', default=default_notification_title, validators=[validators.Optional(), ValidateTokensList()])
notification_body = TextAreaField('Notification body', default=default_notification_body, validators=[validators.Optional(), ValidateTokensList()])
notification_format = SelectField('Notification format', choices=valid_notification_formats.keys(), default=default_notification_format)
fetch_backend = RadioField(u'Fetch method', choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False)
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1,
message="Should contain one or more seconds")])
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, message="Should contain one or more seconds")] )
class watchForm(commonSettingsForm):
@@ -362,19 +340,10 @@ class watchForm(commonSettingsForm):
body = TextAreaField('Request body', [validators.Optional()])
method = SelectField('Request method', choices=valid_method, default=default_method)
ignore_status_codes = BooleanField('Ignore status codes (process non-2xx status codes as normal)', default=False)
check_unique_lines = BooleanField('Only trigger when new lines appear', default=False)
trigger_text = StringListField('Trigger/wait for text', [validators.Optional(), ValidateListRegex()])
text_should_not_be_present = StringListField('Block change-detection if text matches', [validators.Optional(), ValidateListRegex()])
webdriver_js_execute_code = TextAreaField('Execute JavaScript before change detection', render_kw={"rows": "5"}, validators=[validators.Optional()])
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
save_and_preview_button = SubmitField('Save & Preview', render_kw={"class": "pure-button pure-button-primary"})
proxy = RadioField('Proxy')
filter_failure_notification_send = BooleanField(
'Send a notification when the filter can no longer be found on the page', default=False)
notification_muted = BooleanField('Notifications Muted / Off', default=False)
def validate(self, **kwargs):
if not super().validate():
@@ -394,9 +363,7 @@ class watchForm(commonSettingsForm):
class globalSettingsRequestForm(Form):
time_between_check = FormField(TimeBetweenCheckForm)
proxy = RadioField('Proxy')
jitter_seconds = IntegerField('Random jitter seconds ± check',
render_kw={"style": "width: 5em;"},
validators=[validators.NumberRange(min=0, message="Should contain zero or more seconds")])
# datastore.data['settings']['application']..
class globalSettingsApplicationForm(commonSettingsForm):
@@ -405,6 +372,7 @@ class globalSettingsApplicationForm(commonSettingsForm):
global_subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)])
global_ignore_text = StringListField('Ignore Text', [ValidateListRegex()])
ignore_whitespace = BooleanField('Ignore whitespace')
real_browser_save_screenshot = BooleanField('Save last screenshot when using Chrome?')
removepassword_button = SubmitField('Remove password', render_kw={"class": "pure-button pure-button-primary"})
empty_pages_are_a_change = BooleanField('Treat empty pages as a change?', default=False)
render_anchor_tag_content = BooleanField('Render anchor tag content', default=False)
@@ -412,11 +380,6 @@ class globalSettingsApplicationForm(commonSettingsForm):
api_access_token_enabled = BooleanField('API access token security check enabled', default=True, validators=[validators.Optional()])
password = SaltyPasswordField()
filter_failure_notification_threshold_attempts = IntegerField('Number of times the filter can be missing before sending a notification',
render_kw={"style": "width: 5em;"},
validators=[validators.NumberRange(min=0,
message="Should contain zero or more attempts")])
class globalSettingsForm(Form):
# Define these as FormFields/"sub forms", this way it matches the JSON storage

View File

@@ -1,29 +1,23 @@
from bs4 import BeautifulSoup
from inscriptis import get_text
from inscriptis.model.config import ParserConfig
from jsonpath_ng.ext import parse
from typing import List
import json
import re
from typing import List
from bs4 import BeautifulSoup
from jsonpath_ng.ext import parse
import re
from inscriptis import get_text
from inscriptis.model.config import ParserConfig
class FilterNotFoundInResponse(ValueError):
def __init__(self, msg):
ValueError.__init__(self, msg)
class JSONNotFound(ValueError):
def __init__(self, msg):
ValueError.__init__(self, msg)
# Given a CSS Rule, and a blob of HTML, return the blob of HTML that matches
def css_filter(css_filter, html_content):
soup = BeautifulSoup(html_content, "html.parser")
html_block = ""
r = soup.select(css_filter, separator="")
if len(html_content) > 0 and len(r) == 0:
raise FilterNotFoundInResponse(css_filter)
for item in r:
for item in soup.select(css_filter, separator=""):
html_block += str(item)
return html_block + "\n"
@@ -48,19 +42,8 @@ def xpath_filter(xpath_filter, html_content):
tree = html.fromstring(bytes(html_content, encoding='utf-8'))
html_block = ""
r = tree.xpath(xpath_filter.strip(), namespaces={'re': 'http://exslt.org/regular-expressions'})
if len(html_content) > 0 and len(r) == 0:
raise FilterNotFoundInResponse(xpath_filter)
#@note: //title/text() won't work where <title>CDATA..
for element in r:
if type(element) == etree._ElementStringResult:
html_block += str(element) + "<br/>"
elif type(element) == etree._ElementUnicodeResult:
html_block += str(element) + "<br/>"
else:
html_block += etree.tostring(element, pretty_print=True).decode('utf-8') + "<br/>"
for item in tree.xpath(xpath_filter.strip(), namespaces={'re':'http://exslt.org/regular-expressions'}):
html_block+= etree.tostring(item, pretty_print=True).decode('utf-8')+"<br/>"
return html_block
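A minimal sketch (not part of the diff) of the FilterNotFoundInResponse behaviour shown above, using BeautifulSoup the same way css_filter() does; the HTML and selector are hypothetical.
from bs4 import BeautifulSoup
html_content = "<div><span class='price'>100</span></div>"
r = BeautifulSoup(html_content, "html.parser").select(".does-not-exist")
if len(html_content) > 0 and len(r) == 0:
    print("css_filter() would raise FilterNotFoundInResponse('.does-not-exist') here")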
@@ -79,35 +62,19 @@ def extract_element(find='title', html_content=''):
return element_text
#
def _parse_json(json_data, json_filter):
if 'json:' in json_filter:
jsonpath_expression = parse(json_filter.replace('json:', ''))
match = jsonpath_expression.find(json_data)
return _get_stripped_text_from_json_match(match)
def _parse_json(json_data, jsonpath_filter):
s=[]
jsonpath_expression = parse(jsonpath_filter.replace('json:', ''))
match = jsonpath_expression.find(json_data)
if 'jq:' in json_filter:
try:
import jq
except ModuleNotFoundError:
# `jq` requires full compilation on Windows and so isn't generally available
raise Exception("jq support not found")
jq_expression = jq.compile(json_filter.replace('jq:', ''))
match = jq_expression.input(json_data).all()
return _get_stripped_text_from_json_match(match)
def _get_stripped_text_from_json_match(match):
s = []
# More than one result, we will return it as a JSON list.
if len(match) > 1:
for i in match:
s.append(i.value if hasattr(i, 'value') else i)
s.append(i.value)
# Single value, use just the value, as it could be later used in a token in notifications.
if len(match) == 1:
s = match[0].value if hasattr(match[0], 'value') else match[0]
s = match[0].value
# Re #257 - Better handling where it does not exist, in the case the original 's' value was False..
if not match:
@@ -119,16 +86,16 @@ def _get_stripped_text_from_json_match(match):
return stripped_text_from_html
def extract_json_as_string(content, json_filter):
def extract_json_as_string(content, jsonpath_filter):
stripped_text_from_html = False
# Try to parse/filter out the JSON, if we get some parser error, then maybe it's embedded <script type=ldjson>
try:
stripped_text_from_html = _parse_json(json.loads(content), json_filter)
stripped_text_from_html = _parse_json(json.loads(content), jsonpath_filter)
except json.JSONDecodeError:
# Foreach <script json></script> blob.. just return the first that matches json_filter
# Foreach <script json></script> blob.. just return the first that matches jsonpath_filter
s = []
soup = BeautifulSoup(content, 'html.parser')
bs_result = soup.findAll('script')
@@ -147,7 +114,7 @@ def extract_json_as_string(content, json_filter):
# Just skip it
continue
else:
stripped_text_from_html = _parse_json(json_data, json_filter)
stripped_text_from_html = _parse_json(json_data, jsonpath_filter)
if stripped_text_from_html:
break
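A minimal sketch (not part of the diff) of what a 'json:' filter resolves to via jsonpath_ng, as used in _parse_json() above; the JSON document and path are hypothetical.
import json
from jsonpath_ng.ext import parse
json_data = json.loads('{"offers": [{"price": 23.5}, {"price": 19.0}]}')
match = parse('offers[*].price').find(json_data)   # what a 'json:offers[*].price' filter evaluates
print([m.value for m in match])                    # [23.5, 19.0]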
@@ -235,17 +202,3 @@ def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:
return text_content
def workarounds_for_obfuscations(content):
"""
Some sites are using sneaky tactics to make prices and other information un-renderable by Inscriptis
This could go into its own Pip package in the future, for faster updates
"""
# HomeDepot.com style <span>$<!-- -->90<!-- -->.<!-- -->74</span>
# https://github.com/weblyzard/inscriptis/issues/45
if not content:
return content
content = re.sub('<!--\s+-->', '', content)
return content

View File

@@ -1,24 +1,29 @@
from os import getenv
import collections
import os
import uuid as uuid_builder
from changedetectionio.notification import (
default_notification_body,
default_notification_format,
default_notification_title,
)
_FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT = 6
class model(dict):
base_config = {
'note': "Hello! If you change this file manually, please be sure to restart your changedetection.io instance!",
'watching': {},
'settings': {
'headers': {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
'Accept-Encoding': 'gzip, deflate', # No support for brotli in python requests yet.
'Accept-Language': 'en-GB,en-US;q=0.9,en;'
},
'requests': {
'timeout': int(getenv("DEFAULT_SETTINGS_REQUESTS_TIMEOUT", "45")), # Default 45 seconds
'timeout': 15, # Default 15 seconds
'time_between_check': {'weeks': None, 'days': None, 'hours': 3, 'minutes': None, 'seconds': None},
'jitter_seconds': 0,
'workers': int(getenv("DEFAULT_SETTINGS_REQUESTS_WORKERS", "10")), # Number of threads, lower is better for slow connections
'workers': 10, # Number of threads, lower is better for slow connections
'proxy': None # Preferred proxy connection
},
'application': {
@@ -27,8 +32,7 @@ class model(dict):
'base_url' : None,
'extract_title_as_title': False,
'empty_pages_are_a_change': False,
'fetch_backend': getenv("DEFAULT_FETCH_BACKEND", "html_requests"),
'filter_failure_notification_threshold_attempts': _FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT,
'fetch_backend': os.getenv("DEFAULT_FETCH_BACKEND", "html_requests"),
'global_ignore_text': [], # List of text to ignore when calculating the comparison checksum
'global_subtractive_selectors': [],
'ignore_whitespace': True,
@@ -38,6 +42,7 @@ class model(dict):
'notification_title': default_notification_title,
'notification_body': default_notification_body,
'notification_format': default_notification_format,
'real_browser_save_screenshot': True,
'schema_version' : 0,
'webdriver_delay': None # Extra delay in seconds before extracting text
}
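A minimal sketch (not part of the diff) of overriding these defaults through the environment variables referenced above; the concrete override values are hypothetical.
from os import getenv
# e.g. started with DEFAULT_SETTINGS_REQUESTS_TIMEOUT=90 DEFAULT_SETTINGS_REQUESTS_WORKERS=3 in the environment
timeout = int(getenv("DEFAULT_SETTINGS_REQUESTS_TIMEOUT", "45"))   # falls back to 45 seconds
workers = int(getenv("DEFAULT_SETTINGS_REQUESTS_WORKERS", "10"))   # falls back to 10 worker threads
fetch_backend = getenv("DEFAULT_FETCH_BACKEND", "html_requests")
print(timeout, workers, fetch_backend)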

View File

@@ -1,22 +1,24 @@
import os
import uuid as uuid_builder
from distutils.util import strtobool
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
from changedetectionio.notification import (
default_notification_format_for_watch
default_notification_body,
default_notification_format,
default_notification_title,
)
class model(dict):
__newest_history_key = None
__history_n=0
__base_config = {
'url': None,
'tag': None,
'last_checked': 0,
'last_changed': 0,
'paused': False,
'last_viewed': 0, # history key value of the last viewed via the [diff] link
#'newest_history_key': 0,
@@ -30,37 +32,29 @@ class model(dict):
'ignore_text': [], # List of text to ignore when calculating the comparison checksum
# Custom notification content
'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
'notification_title': None,
'notification_body': None,
'notification_format': default_notification_format_for_watch,
'notification_muted': False,
'notification_title': default_notification_title,
'notification_body': default_notification_body,
'notification_format': default_notification_format,
'css_filter': '',
'last_error': False,
'extract_text': [], # Extract text by regex after filters
'subtractive_selectors': [],
'trigger_text': [], # List of text or regex to wait for until a change is detected
'text_should_not_be_present': [], # Text that should not be present
'fetch_backend': None,
'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
'extract_title_as_title': False,
'check_unique_lines': False, # On change-detected, compare against all history if its something new
'proxy': None, # Preferred proxy connection
# Re #110, so then if this is set to None, we know to use the default value instead
# Requires setting to None on submit if it's the same as the default
# Should be all None by default, so we use the system default in this case.
'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
'webdriver_delay': None,
'webdriver_js_execute_code': None, # Run before change-detection
'webdriver_delay': None
}
jitter_seconds = 0
def __init__(self, *arg, **kw):
import uuid
self.update(self.__base_config)
self.__datastore_path = kw['datastore_path']
self['uuid'] = str(uuid_builder.uuid4())
self['uuid'] = str(uuid.uuid4())
del kw['datastore_path']
@@ -68,10 +62,7 @@ class model(dict):
self.update(kw['default'])
del kw['default']
# Be sure the cached timestamp is ready
bump = self.history
# Goes at the end so we update the default object with the initialiser
# goes at the end so we update the default object with the initialiser
super(model, self).__init__(*arg, **kw)
@property
@@ -81,28 +72,6 @@ class model(dict):
return False
def ensure_data_dir_exists(self):
target_path = os.path.join(self.__datastore_path, self['uuid'])
if not os.path.isdir(target_path):
print ("> Creating data dir {}".format(target_path))
os.mkdir(target_path)
@property
def label(self):
# Used for sorting
if self['title']:
return self['title']
return self['url']
@property
def last_changed(self):
# last_changed will be the newest snapshot, but when we have just one snapshot, it should be 0
if self.__history_n <= 1:
return 0
if self.__newest_history_key:
return int(self.__newest_history_key)
return 0
@property
def history_n(self):
return self.__history_n
@@ -116,12 +85,9 @@ class model(dict):
# Read the history file as a dict
fname = os.path.join(self.__datastore_path, self.get('uuid'), "history.txt")
if os.path.isfile(fname):
logging.debug("Reading history index " + str(time.time()))
logging.debug("Disk IO accessed " + str(time.time()))
with open(fname, "r") as f:
for i in f.readlines():
if ',' in i:
k, v = i.strip().split(',', 2)
tmp_history[k] = v
tmp_history = dict(i.strip().split(',', 2) for i in f.readlines())
if len(tmp_history):
self.__newest_history_key = list(tmp_history.keys())[-1]
@@ -148,36 +114,38 @@ class model(dict):
bump = self.history
return self.__newest_history_key
# Save some text file to the appropriate path and bump the history
# result_obj from fetch_site_status.run()
def save_history_text(self, contents, timestamp):
import uuid
from os import mkdir, path, unlink
import logging
output_path = os.path.join(self.__datastore_path, self['uuid'])
output_path = "{}/{}".format(self.__datastore_path, self['uuid'])
self.ensure_data_dir_exists()
snapshot_fname = os.path.join(output_path, str(uuid.uuid4()))
# In case the operator deleted it, check and create.
if not os.path.isdir(output_path):
mkdir(output_path)
snapshot_fname = "{}/{}.stripped.txt".format(output_path, uuid.uuid4())
logging.debug("Saving history text {}".format(snapshot_fname))
# in /diff/ and /preview/ we are going to assume for now that it's UTF-8 when reading
# most sites are utf-8 and some are even broken utf-8
with open(snapshot_fname, 'wb') as f:
f.write(contents)
f.close()
# Append to index
# @todo check last char was \n
index_fname = os.path.join(output_path, "history.txt")
index_fname = "{}/history.txt".format(output_path)
with open(index_fname, 'a') as f:
f.write("{},{}\n".format(timestamp, snapshot_fname))
f.close()
self.__newest_history_key = timestamp
self.__history_n += 1
self.__history_n+=1
# @todo bump static cache of the last timestamp so we dont need to examine the file to set a proper ''viewed'' status
#@todo bump static cache of the last timestamp so we dont need to examine the file to set a proper ''viewed'' status
return snapshot_fname
@property
@@ -189,70 +157,9 @@ class model(dict):
def threshold_seconds(self):
seconds = 0
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
for m, n in mtable.items():
x = self.get('time_between_check', {}).get(m, None)
if x:
seconds += x * n
return seconds
# Iterate over all history texts and see if something new exists
def lines_contain_something_unique_compared_to_history(self, lines: list):
local_lines = set([l.decode('utf-8').strip().lower() for l in lines])
# Compare each lines (set) against each history text file (set) looking for something new..
existing_history = set({})
for k, v in self.history.items():
alist = set([line.decode('utf-8').strip().lower() for line in open(v, 'rb')])
existing_history = existing_history.union(alist)
# Check that everything in local_lines(new stuff) already exists in existing_history - it should
# if not, something new happened
return not local_lines.issubset(existing_history)
def get_screenshot(self):
fname = os.path.join(self.__datastore_path, self['uuid'], "last-screenshot.png")
if os.path.isfile(fname):
return fname
return False
def __get_file_ctime(self, filename):
fname = os.path.join(self.__datastore_path, self['uuid'], filename)
if os.path.isfile(fname):
return int(os.path.getmtime(fname))
return False
@property
def error_text_ctime(self):
return self.__get_file_ctime('last-error.txt')
@property
def snapshot_text_ctime(self):
if self.history_n==0:
return False
timestamp = list(self.history.keys())[-1]
return int(timestamp)
@property
def snapshot_screenshot_ctime(self):
return self.__get_file_ctime('last-screenshot.png')
@property
def snapshot_error_screenshot_ctime(self):
return self.__get_file_ctime('last-error-screenshot.png')
def get_error_text(self):
"""Return the text saved from a previous request that resulted in a non-200 error"""
fname = os.path.join(self.__datastore_path, self['uuid'], "last-error.txt")
if os.path.isfile(fname):
with open(fname, 'r') as f:
return f.read()
return False
def get_error_snapshot(self):
"""Return path to the screenshot that resulted in a non-200 error"""
fname = os.path.join(self.__datastore_path, self['uuid'], "last-error-screenshot.png")
if os.path.isfile(fname):
return fname
return False
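A minimal, self-contained sketch (not part of the diff) of the set comparison behind lines_contain_something_unique_compared_to_history() earlier in this file; the snapshot lines are hypothetical.
local_lines = set(l.strip().lower() for l in ["Price: 100 EUR", "In stock"])
existing_history = set(l.strip().lower() for l in ["price: 100 eur", "out of stock"])
something_new = not local_lines.issubset(existing_history)
print(something_new)   # True - "in stock" never appeared in any previous snapshot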

View File

@@ -14,19 +14,16 @@ valid_tokens = {
'current_snapshot': ''
}
default_notification_format_for_watch = 'System default'
default_notification_format = 'Text'
default_notification_body = '{watch_url} had a change.\n---\n{diff}\n---\n'
default_notification_title = 'ChangeDetection.io Notification - {watch_url}'
valid_notification_formats = {
'Text': NotifyFormat.TEXT,
'Markdown': NotifyFormat.MARKDOWN,
'HTML': NotifyFormat.HTML,
# Used only for editing a watch (not for global)
default_notification_format_for_watch: default_notification_format_for_watch
}
default_notification_format = 'Text'
default_notification_body = '{watch_url} had a change.\n---\n{diff}\n---\n'
default_notification_title = 'ChangeDetection.io Notification - {watch_url}'
def process_notification(n_object, datastore):
# Get the notification body from datastore
@@ -37,6 +34,7 @@ def process_notification(n_object, datastore):
valid_notification_formats[default_notification_format],
)
# Insert variables into the notification content
notification_parameters = create_notification_parameters(n_object, datastore)
@@ -50,10 +48,9 @@ def process_notification(n_object, datastore):
# Anything higher than or equal to WARNING (which covers things like Connection errors)
# raise it as an exception
apobjs=[]
sent_objs=[]
from .apprise_asset import asset
for url in n_object['notification_urls']:
apobj = apprise.Apprise(debug=True, asset=asset)
apobj = apprise.Apprise(debug=True)
url = url.strip()
if len(url):
print(">> Process Notification: AppRise notifying {}".format(url))
@@ -66,36 +63,23 @@ def process_notification(n_object, datastore):
# So if no avatar_url is specified, add one so it can be correctly calculated into the total payload
k = '?' if not '?' in url else '&'
if not 'avatar_url' in url and not url.startswith('mail'):
if not 'avatar_url' in url:
url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png'
if url.startswith('tgram://'):
# Telegram only supports a limited subset of HTML, remove the '<br/>' we place in.
# re https://github.com/dgtlmoon/changedetection.io/issues/555
# @todo re-use an existing library we have already imported to strip all non-allowed tags
n_body = n_body.replace('<br/>', '\n')
n_body = n_body.replace('</br>', '\n')
# real limit is 4096, but minus some for extra metadata
payload_max_size = 3600
body_limit = max(0, payload_max_size - len(n_title))
n_title = n_title[0:payload_max_size]
n_body = n_body[0:body_limit]
elif url.startswith('discord://') or url.startswith('https://discordapp.com/api/webhooks') or url.startswith('https://discord.com/api'):
elif url.startswith('discord://'):
# real limit is 2000, but minus some for extra metadata
payload_max_size = 1700
body_limit = max(0, payload_max_size - len(n_title))
n_title = n_title[0:payload_max_size]
n_body = n_body[0:body_limit]
elif url.startswith('mailto'):
# Apprise will default to HTML, so we need to override it
# So that what's generated in n_body is in line with what is going to be sent.
# https://github.com/caronc/apprise/issues/633#issuecomment-1191449321
if not 'format=' in url and (n_format == 'text' or n_format == 'markdown'):
prefix = '?' if not '?' in url else '&'
url = "{}{}format={}".format(url, prefix, n_format)
apobj.add(url)
apobj.notify(
@@ -112,14 +96,7 @@ def process_notification(n_object, datastore):
log_value = logs.getvalue()
if log_value and 'WARNING' in log_value or 'ERROR' in log_value:
raise Exception(log_value)
sent_objs.append({'title': n_title,
'body': n_body,
'url' : url,
'body_format': n_format})
# Return what was sent for better logging - after the for loop
return sent_objs
# Notification title + body content parameters get created here.
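A minimal sketch (not part of the diff) of the per-service payload truncation above, using the Telegram budget from the code; the title and body values are hypothetical.
n_title = "ChangeDetection.io Notification - https://example.com"
n_body = "x" * 5000                           # oversized notification body
payload_max_size = 3600                       # Telegram budget used in the code above
body_limit = max(0, payload_max_size - len(n_title))
n_title = n_title[0:payload_max_size]
n_body = n_body[0:body_limit]
print(len(n_title), len(n_body))              # stays within the combined payload budget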

View File

@@ -9,8 +9,6 @@
# exit when any command fails
set -e
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
find tests/test_*py -type f|while read test_name
do
echo "TEST RUNNING $test_name"
@@ -25,13 +23,6 @@ export BASE_URL="https://really-unique-domain.io"
pytest tests/test_notification.py
## JQ + JSON: filter test
# jq is not available on windows and we should just test it when the package is installed
# this will re-test with jq support
pip3 install jq~=1.3
pytest tests/test_jsonpath_jq_selector.py
# Now for the selenium and playwright/browserless fetchers
# Note - this is not UI functional tests - just checking that each one can fetch the content
@@ -41,66 +32,16 @@ docker run -d --name $$-test_selenium -p 4444:4444 --rm --shm-size="2g" seleni
sleep 5
export WEBDRIVER_URL=http://localhost:4444/wd/hub
pytest tests/fetchers/test_content.py
pytest tests/test_errorhandling.py
unset WEBDRIVER_URL
docker kill $$-test_selenium
echo "TESTING WEBDRIVER FETCH > PLAYWRIGHT/BROWSERLESS..."
# Not all platforms support playwright (not ARM/rPI), so it's not packaged in requirements.txt
PLAYWRIGHT_VERSION=$(grep -i -E "RUN pip install.+" "$SCRIPT_DIR/../Dockerfile" | grep --only-matching -i -E "playwright[=><~+]+[0-9\.]+")
echo "using $PLAYWRIGHT_VERSION"
pip3 install "$PLAYWRIGHT_VERSION"
pip3 install playwright~=1.22
docker run -d --name $$-test_browserless -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.53-chrome-stable
# takes a while to spin up
sleep 5
export PLAYWRIGHT_DRIVER_URL=ws://127.0.0.1:3000
pytest tests/fetchers/test_content.py
pytest tests/test_errorhandling.py
pytest tests/visualselector/test_fetch_data.py
unset PLAYWRIGHT_DRIVER_URL
docker kill $$-test_browserless
# Test proxy list handling, starting two squids on different ports
# Each squid adds a different header to the response, which is the main thing we test for.
docker run -d --name $$-squid-one --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf -p 3128:3128 ubuntu/squid:4.13-21.10_edge
docker run -d --name $$-squid-two --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf -p 3129:3128 ubuntu/squid:4.13-21.10_edge
# So, basic HTTP as env var test
export HTTP_PROXY=http://localhost:3128
export HTTPS_PROXY=http://localhost:3128
pytest tests/proxy_list/test_proxy.py
docker logs $$-squid-one 2>/dev/null|grep one.changedetection.io
if [ $? -ne 0 ]
then
echo "Did not see a request to one.changedetection.io in the squid logs (while checking env vars HTTP_PROXY/HTTPS_PROXY)"
fi
unset HTTP_PROXY
unset HTTPS_PROXY
# 2nd test actually choose the preferred proxy from proxies.json
cp tests/proxy_list/proxies.json-example ./test-datastore/proxies.json
# Makes a watch use a preferred proxy
pytest tests/proxy_list/test_multiple_proxy.py
# Should be a request in the default "first" squid
docker logs $$-squid-one 2>/dev/null|grep chosen.changedetection.io
if [ $? -ne 0 ]
then
echo "Did not see a request to chosen.changedetection.io in the squid logs (while checking preferred proxy)"
fi
# And one in the 'second' squid (user selects this as preferred)
docker logs $$-squid-two 2>/dev/null|grep chosen.changedetection.io
if [ $? -ne 0 ]
then
echo "Did not see a request to chosen.changedetection.io in the squid logs (while checking preferred proxy)"
fi
# @todo - test system override proxy selection and watch defaults, setup a 3rd squid?
docker kill $$-squid-one
docker kill $$-squid-two
docker kill $$-test_browserless

View File

@@ -1,42 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="15"
height="16.363636"
viewBox="0 0 15 16.363636"
version="1.1"
id="svg4"
sodipodi:docname="bell-off.svg"
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview5"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
showgrid="false"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0"
inkscape:zoom="28.416667"
inkscape:cx="-0.59824046"
inkscape:cy="12"
inkscape:window-width="1554"
inkscape:window-height="896"
inkscape:window-x="2095"
inkscape:window-y="107"
inkscape:window-maximized="0"
inkscape:current-layer="svg4" />
<defs
id="defs8" />
<path
d="m 14.318182,11.762045 v 1.1925 H 5.4102273 L 11.849318,7.1140909 C 12.234545,9.1561364 12.54,11.181818 14.318182,11.762045 Z m -6.7984093,4.601591 c 1.0759091,0 2.0256823,-0.955909 2.0256823,-2.045454 H 5.4545455 c 0,1.089545 0.9879545,2.045454 2.0652272,2.045454 z M 15,2.8622727 0.9177273,15.636136 0,14.627045 l 1.8443182,-1.6725 h -1.1625 v -1.1925 C 4.0070455,10.677273 2.1784091,4.5388636 5.3611364,2.6897727 5.8009091,2.4347727 6.0709091,1.9609091 6.0702273,1.4488636 v -0.00205 C 6.0702273,0.64772727 6.7104545,0 7.5,0 8.2895455,0 8.9297727,0.64772727 8.9297727,1.4468182 v 0.00205 C 8.9290909,1.9602319 9.199773,2.4354591 9.638864,2.6897773 10.364318,3.111141 10.827273,3.7568228 11.1525,4.5129591 L 14.085682,1.8531818 Z M 6.8181818,1.3636364 C 6.8181818,1.74 7.1236364,2.0454545 7.5,2.0454545 7.8763636,2.0454545 8.1818182,1.74 8.1818182,1.3636364 8.1818182,0.98795455 7.8763636,0.68181818 7.5,0.68181818 c -0.3763636,0 -0.6818182,0.30613637 -0.6818182,0.68181822 z"
id="path2"
style="fill:#f8321b;stroke-width:0.681818;fill-opacity:1" />
</svg>


View File

@@ -1,51 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="20.108334mm"
height="21.43125mm"
viewBox="0 0 20.108334 21.43125"
version="1.1"
id="svg5"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs2" />
<g
id="layer1"
transform="translate(-141.05873,-76.816635)">
<image
width="20.108334"
height="21.43125"
preserveAspectRatio="none"
style="image-rendering:optimizeQuality"
xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEwAAABRCAYAAAB430BuAAAABHNCSVQICAgIfAhkiAAABLxJREFU
eJztnN2Z2jgUhl8Z7petIGwF0WMXsFBBoIKwFWS2gmQryKSCJRXsTAUDBTDRVBCmgkAB9tkLexh+
bIONLGwP7xU2RjafpaOjoyNBCxHNQAJEfG5sl+3ZLrAWeAyST5/sF91mFH3bRbZbsAq4ClaQq2B7
iKYnmg9Z318F20ICRnj8pMOd6E3HscNVsATxmQD/oeghPCnDLO26q2AkYin+TQ7XREyyrn3zgu2J
BSEjZTBZ179pwQ7EEv7KaoovvFnBUsV6ZHrsd+0WTHhKPV1SLGivYEsA1KEtEs2grFitRjQ65VxP
fH5JgEjAKsvXupKwFfYxaYJeSeHcWqVSCuwD7/HQQD8lRHLWDStBWG3slbAElkTc5/lTZdkIJhpN
h6/UUZDyzAgZK8PKVoEKErE8HlD0bBVcI2ZqwdBWYbFgAT+g1UZwrBbcvRyIpofHJ1Sh1rQCZt1k
lN5msQAm8CoYoFF8KVHOsFtQ5aayExBUhpnopJl6J/3/FREGWCrxmaH40/4z1oyQ320Yf5dDozXC
P4QMCRkCY4S5w/tbMTtd4L2Ngo6wJmSQ4hfdScAU+OjgGazgOXEl8oJyof3Z6Spx0iTzgnLKsMoK
w9SRuoR3rHniVVMXwRpDXQR7d+kHOJV6CFZB0khVOBGsTcE6VzWsNVGQizfJptU+N4LlD3AbVfsu
XsOahhvB8nrB08IrtcGNYNIct+EYl2+S6mr0D8kLUMrV6BfFRTzOGs4Ey8p1aNrUnssaliaMO/vV
sfNi3AmW5j54DgUTO/dyJ1hab9iwHhLcNskP23ZMND0kewFBXek6vZvHg/hMiUPSN00z+OBasFig
y8wSRfnZ0adSBz+sUVwFK4jbJhnPP06To1ETczpcCnavHhltHd82LU0AXDbJMGXBU8PSBAA8Jxk0
wnNaqlGSJuAyg+dsXIV38iZqXU3iWsmodhetSNlDQgJGriZxbWVSe1hS/gQ+S/C6j4QEfES21vxU
icXsoC4vC5mqJvbybyXgduucG/YWaYmmj+IdHvpoxFdt8ltRP5h3iZjRqfBh60C4t1rNY7rxAU95
aYnhEp+/u8pgxGfeRCfyJIR5SkLfFOHYXMMzu63PEDF9WQnSo8MUmhduyUWYEzGyvnRmU3683ugG
GAG/2bqJU4RnFDNCpsfWb5chswUnwb5Xg+hxiyo9w7MGJoSVpmYulam+A8scS+5nPYtf+s9mpZw7
J1nayDnCVuu4Ck+E6DqIBYDHHR1+is/n8kVUhfBExMBFMzm4taafkXcWL9BSfBG/nNN8sutYcE3S
d7XI3o6lSpIe/xcAIX/svzDxMVu22BAyLNKL2q9hwrdLiZWwXbP6B99GDLaGSpoOD6JPn4yxK1i8
B0StY1zKsCJiQNxzQ0HRbAm2BsZN2TBDGVaE5USzIVjsNix2VrzWHmUwB6J5fD32uyKCzQ7OxG5D
vzZuQ0E2osXjRlBMjvWe5WtYPE4b2BynXQJlMEToTUegmEiwM1mzQ1nBvqvH5ov1wlZHcA+AZHdc
xQW7vNuQS9kBtzKs1IIRMM7b0q/YvGTzto4qbFutdV5FnLtLk2x3JVWUfXKTbIu9Opc2J6Osj19S
HLfJKO64r6rg/wFBX3+2ZapW8wAAAABJRU5ErkJggg==
"
id="image832"
x="141.05873"
y="76.816635" />
</g>
</svg>


View File

@@ -1,122 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
version="1.1"
id="Capa_1"
x="0px"
y="0px"
viewBox="0 0 15 14.998326"
xml:space="preserve"
width="15"
height="14.998326"
sodipodi:docname="play.svg"
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:dc="http://purl.org/dc/elements/1.1/"><sodipodi:namedview
id="namedview21"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
showgrid="false"
inkscape:zoom="45.47174"
inkscape:cx="7.4991632"
inkscape:cy="7.4991632"
inkscape:window-width="1554"
inkscape:window-height="896"
inkscape:window-x="3048"
inkscape:window-y="227"
inkscape:window-maximized="0"
inkscape:current-layer="Capa_1" /><metadata
id="metadata39"><rdf:RDF><cc:Work
rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
id="defs37" />
<path
id="path2"
style="fill:#1b98f8;fill-opacity:1;stroke-width:0.0292893"
d="M 7.4980469,0 C 4.5496028,-0.04093755 1.7047721,1.8547661 0.58789062,4.5800781 -0.57819305,7.2574082 0.02636631,10.583252 2.0703125,12.671875 4.0368718,14.788335 7.2754393,15.560096 9.9882812,14.572266 12.800219,13.617028 14.874915,10.855516 14.986328,7.8847656 15.172991,4.9968456 13.497714,2.109448 10.910156,0.8203125 9.858961,0.28011352 8.6796569,-0.00179908 7.4980469,0 Z"
sodipodi:nodetypes="ccccccc" />
<g
id="g4"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g6"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g8"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g10"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g12"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g14"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g16"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g18"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g20"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g22"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g24"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g26"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g28"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g30"
transform="translate(-0.01903604,0.02221043)">
</g>
<g
id="g32"
transform="translate(-0.01903604,0.02221043)">
</g>
<path
sodipodi:type="star"
style="fill:#ffffff;fill-opacity:1;stroke-width:37.7953;paint-order:stroke fill markers"
id="path1203"
inkscape:flatsided="false"
sodipodi:sides="3"
sodipodi:cx="7.2964563"
sodipodi:cy="7.3240671"
sodipodi:r1="3.805218"
sodipodi:r2="1.9026089"
sodipodi:arg1="-0.0017436774"
sodipodi:arg2="1.0454539"
inkscape:rounded="0"
inkscape:randomized="0"
d="M 11.101669,7.317432 8.2506324,8.9701135 5.3995964,10.622795 5.3938504,7.3273846 5.3881041,4.0319742 8.2448863,5.6747033 Z"
inkscape:transform-center-x="-0.94843001"
inkscape:transform-center-y="0.0033175346" /></svg>


View File

@@ -1,20 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="18"
height="19.92"
viewBox="0 0 18 19.92"
version="1.1"
id="svg6"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs10" />
<path
d="M -3,-2 H 21 V 22 H -3 Z"
fill="none"
id="path2" />
<path
d="m 15,14.08 c -0.76,0 -1.44,0.3 -1.96,0.77 L 5.91,10.7 C 5.96,10.47 6,10.24 6,10 6,9.76 5.96,9.53 5.91,9.3 L 12.96,5.19 C 13.5,5.69 14.21,6 15,6 16.66,6 18,4.66 18,3 18,1.34 16.66,0 15,0 c -1.66,0 -3,1.34 -3,3 0,0.24 0.04,0.47 0.09,0.7 L 5.04,7.81 C 4.5,7.31 3.79,7 3,7 1.34,7 0,8.34 0,10 c 0,1.66 1.34,3 3,3 0.79,0 1.5,-0.31 2.04,-0.81 l 7.12,4.16 c -0.05,0.21 -0.08,0.43 -0.08,0.65 0,1.61 1.31,2.92 2.92,2.92 1.61,0 2.92,-1.31 2.92,-2.92 0,-1.61 -1.31,-2.92 -2.92,-2.92 z"
id="path4"
style="fill:#ffffff;fill-opacity:1" />
</svg>


View File

@@ -10,13 +10,7 @@ $(document).ready(function () {
if (hash_name === '#screenshot') {
$("img#screenshot-img").attr('src', screenshot_url);
$("#settings").hide();
} else if (hash_name === '#error-screenshot') {
$("img#error-screenshot-img").attr('src', error_screenshot_url);
$("#settings").hide();
}
else {
} else {
$("#settings").show();
}
}

View File

@@ -40,19 +40,13 @@ $(document).ready(function() {
$.ajax({
type: "POST",
url: notification_base_url,
data : data,
statusCode: {
400: function() {
// More than likely the CSRF token was lost when the server restarted
alert("There was a problem processing the request, please reload the page.");
}
}
data : data
}).done(function(data){
console.log(data);
alert('Sent');
}).fail(function(data){
console.log(data);
alert('There was an error communicating with the server.');
alert('Error: '+data.responseJSON.error);
})
});
});

View File

@@ -1,44 +1,51 @@
// Rewrite this is a plugin.. is all this JS really 'worth it?'
window.addEventListener('hashchange', function () {
var tabs = document.getElementsByClassName('active');
while (tabs[0]) {
tabs[0].classList.remove('active')
}
set_active_tab();
if(!window.location.hash) {
var tab=document.querySelectorAll("#default-tab a");
tab[0].click();
}
window.addEventListener('hashchange', function() {
var tabs = document.getElementsByClassName('active');
while (tabs[0]) {
tabs[0].classList.remove('active')
}
set_active_tab();
}, false);
var has_errors = document.querySelectorAll(".messages .error");
var has_errors=document.querySelectorAll(".messages .error");
if (!has_errors.length) {
if (document.location.hash == "") {
document.querySelector(".tabs ul li:first-child a").click();
if (document.location.hash == "" ) {
document.location.hash = "#general";
document.getElementById("default-tab").className = "active";
} else {
set_active_tab();
}
} else {
focus_error_tab();
focus_error_tab();
}
function set_active_tab() {
var tab = document.querySelectorAll("a[href='" + location.hash + "']");
if (tab.length) {
tab[0].parentElement.className = "active";
}
var tab=document.querySelectorAll("a[href='"+location.hash+"']");
if (tab.length) {
tab[0].parentElement.className="active";
}
// hash could move the page down
window.scrollTo(0, 0);
}
function focus_error_tab() {
// time to use jquery or vuejs really,
// activate the tab with the error
var tabs = document.querySelectorAll('.tabs li a'), i;
// time to use jquery or vuejs really,
// activate the tab with the error
var tabs = document.querySelectorAll('.tabs li a'),i;
for (i = 0; i < tabs.length; ++i) {
var tab_name = tabs[i].hash.replace('#', '');
var pane_errors = document.querySelectorAll('#' + tab_name + ' .error')
if (pane_errors.length) {
document.location.hash = '#' + tab_name;
return true;
}
var tab_name=tabs[i].hash.replace('#','');
var pane_errors=document.querySelectorAll('#'+tab_name+' .error')
if (pane_errors.length) {
document.location.hash = '#'+tab_name;
return true;
}
}
return false;
}

View File

@@ -22,18 +22,5 @@ $(function () {
});
});
// checkboxes - check all
$("#check-all").click(function (e) {
$('input[type=checkbox]').not(this).prop('checked', this.checked);
});
// checkboxes - show/hide buttons
$("input[type=checkbox]").click(function (e) {
if ($('input[type=checkbox]:checked').length) {
$('#checkbox-operations').slideDown();
} else {
$('#checkbox-operations').slideUp();
}
});
});

View File

@@ -1,40 +1,16 @@
$(document).ready(function() {
function toggle() {
if ($('input[name="fetch_backend"]:checked').val() == 'html_webdriver') {
if(playwright_enabled) {
// playwright supports headers, so hide everything else
// See #664
$('#requests-override-options #request-method').hide();
$('#requests-override-options #request-body').hide();
// @todo connect this one up
$('#ignore-status-codes-option').hide();
} else {
// selenium/webdriver doesnt support anything afaik, hide it all
$('#requests-override-options').hide();
}
if ($('input[name="fetch_backend"]:checked').val() != 'html_requests') {
$('#requests-override-options').hide();
$('#webdriver-override-options').show();
} else {
$('#requests-override-options').show();
$('#requests-override-options *:hidden').show();
$('#webdriver-override-options').hide();
}
}
$('input[name="fetch_backend"]').click(function (e) {
toggle();
});
toggle();
$('#notification-setting-reset-to-default').click(function (e) {
$('#notification_title').val('');
$('#notification_body').val('');
$('#notification_format').val('System default');
$('#notification_urls').val('');
e.preventDefault();
});
});

View File

@@ -1,3 +1 @@
node_modules
package-lock.json

File diff suppressed because it is too large

View File

@@ -1,26 +0,0 @@
.arrow {
border: solid #1b98f8;
border-width: 0 2px 2px 0;
display: inline-block;
padding: 3px;
&.right {
transform: rotate(-45deg);
-webkit-transform: rotate(-45deg);
}
&.left {
transform: rotate(135deg);
-webkit-transform: rotate(135deg);
}
&.up, &.asc {
transform: rotate(-135deg);
-webkit-transform: rotate(-135deg);
}
&.down, &.desc {
transform: rotate(45deg);
-webkit-transform: rotate(45deg);
}
}

View File

@@ -1,27 +1,11 @@
/*
* -- BASE STYLES --
* Most of these are inherited from Base, but I want to change a few.
* nvm use v14.18.1 && npm install && npm run build
* nvm use v14.18.1
* npm install
* npm run build
* or npm run watch
*/
.arrow {
border: solid #1b98f8;
border-width: 0 2px 2px 0;
display: inline-block;
padding: 3px; }
.arrow.right {
transform: rotate(-45deg);
-webkit-transform: rotate(-45deg); }
.arrow.left {
transform: rotate(135deg);
-webkit-transform: rotate(135deg); }
.arrow.up, .arrow.asc {
transform: rotate(-135deg);
-webkit-transform: rotate(-135deg); }
.arrow.down, .arrow.desc {
transform: rotate(45deg);
-webkit-transform: rotate(45deg); }
body {
color: #333;
background: #262626; }
@@ -71,12 +55,6 @@ code {
white-space: normal; }
.watch-table th {
white-space: nowrap; }
.watch-table th a {
font-weight: normal; }
.watch-table th a.active {
font-weight: bolder; }
.watch-table th a.inactive .arrow {
display: none; }
.watch-table .title-col a[target="_blank"]::after, .watch-table .current-diff-url::after {
content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
margin: 0 3px 0 5px; }
@@ -127,6 +105,24 @@ body:after, body:before {
-webkit-clip-path: polygon(100% 0, 0 0, 0 77.5%, 1% 77.4%, 2% 77.1%, 3% 76.6%, 4% 75.9%, 5% 75.05%, 6% 74.05%, 7% 72.95%, 8% 71.75%, 9% 70.55%, 10% 69.3%, 11% 68.05%, 12% 66.9%, 13% 65.8%, 14% 64.8%, 15% 64%, 16% 63.35%, 17% 62.85%, 18% 62.6%, 19% 62.5%, 20% 62.65%, 21% 63%, 22% 63.5%, 23% 64.2%, 24% 65.1%, 25% 66.1%, 26% 67.2%, 27% 68.4%, 28% 69.65%, 29% 70.9%, 30% 72.15%, 31% 73.3%, 32% 74.35%, 33% 75.3%, 34% 76.1%, 35% 76.75%, 36% 77.2%, 37% 77.45%, 38% 77.5%, 39% 77.3%, 40% 76.95%, 41% 76.4%, 42% 75.65%, 43% 74.75%, 44% 73.75%, 45% 72.6%, 46% 71.4%, 47% 70.15%, 48% 68.9%, 49% 67.7%, 50% 66.55%, 51% 65.5%, 52% 64.55%, 53% 63.75%, 54% 63.15%, 55% 62.75%, 56% 62.55%, 57% 62.5%, 58% 62.7%, 59% 63.1%, 60% 63.7%, 61% 64.45%, 62% 65.4%, 63% 66.45%, 64% 67.6%, 65% 68.8%, 66% 70.05%, 67% 71.3%, 68% 72.5%, 69% 73.6%, 70% 74.65%, 71% 75.55%, 72% 76.35%, 73% 76.9%, 74% 77.3%, 75% 77.5%, 76% 77.45%, 77% 77.25%, 78% 76.8%, 79% 76.2%, 80% 75.4%, 81% 74.45%, 82% 73.4%, 83% 72.25%, 84% 71.05%, 85% 69.8%, 86% 68.55%, 87% 67.35%, 88% 66.2%, 89% 65.2%, 90% 64.3%, 91% 63.55%, 92% 63%, 93% 62.65%, 94% 62.5%, 95% 62.55%, 96% 62.8%, 97% 63.3%, 98% 63.9%, 99% 64.75%, 100% 65.7%);
clip-path: polygon(100% 0, 0 0, 0 77.5%, 1% 77.4%, 2% 77.1%, 3% 76.6%, 4% 75.9%, 5% 75.05%, 6% 74.05%, 7% 72.95%, 8% 71.75%, 9% 70.55%, 10% 69.3%, 11% 68.05%, 12% 66.9%, 13% 65.8%, 14% 64.8%, 15% 64%, 16% 63.35%, 17% 62.85%, 18% 62.6%, 19% 62.5%, 20% 62.65%, 21% 63%, 22% 63.5%, 23% 64.2%, 24% 65.1%, 25% 66.1%, 26% 67.2%, 27% 68.4%, 28% 69.65%, 29% 70.9%, 30% 72.15%, 31% 73.3%, 32% 74.35%, 33% 75.3%, 34% 76.1%, 35% 76.75%, 36% 77.2%, 37% 77.45%, 38% 77.5%, 39% 77.3%, 40% 76.95%, 41% 76.4%, 42% 75.65%, 43% 74.75%, 44% 73.75%, 45% 72.6%, 46% 71.4%, 47% 70.15%, 48% 68.9%, 49% 67.7%, 50% 66.55%, 51% 65.5%, 52% 64.55%, 53% 63.75%, 54% 63.15%, 55% 62.75%, 56% 62.55%, 57% 62.5%, 58% 62.7%, 59% 63.1%, 60% 63.7%, 61% 64.45%, 62% 65.4%, 63% 66.45%, 64% 67.6%, 65% 68.8%, 66% 70.05%, 67% 71.3%, 68% 72.5%, 69% 73.6%, 70% 74.65%, 71% 75.55%, 72% 76.35%, 73% 76.9%, 74% 77.3%, 75% 77.5%, 76% 77.45%, 77% 77.25%, 78% 76.8%, 79% 76.2%, 80% 75.4%, 81% 74.45%, 82% 73.4%, 83% 72.25%, 84% 71.05%, 85% 69.8%, 86% 68.55%, 87% 67.35%, 88% 66.2%, 89% 65.2%, 90% 64.3%, 91% 63.55%, 92% 63%, 93% 62.65%, 94% 62.5%, 95% 62.55%, 96% 62.8%, 97% 63.3%, 98% 63.9%, 99% 64.75%, 100% 65.7%); }
.arrow {
border: solid black;
border-width: 0 3px 3px 0;
display: inline-block;
padding: 3px; }
.arrow.right {
transform: rotate(-45deg);
-webkit-transform: rotate(-45deg); }
.arrow.left {
transform: rotate(135deg);
-webkit-transform: rotate(135deg); }
.arrow.up {
transform: rotate(-135deg);
-webkit-transform: rotate(-135deg); }
.arrow.down {
transform: rotate(45deg);
-webkit-transform: rotate(45deg); }
.button-small {
font-size: 85%; }
@@ -207,18 +203,13 @@ body:after, body:before {
border-radius: 10px;
margin-bottom: 1em; }
#new-watch-form input {
display: inline-block;
margin-bottom: 5px; }
width: auto !important;
display: inline-block; }
#new-watch-form .label {
display: none; }
#new-watch-form legend {
color: #fff;
font-weight: bold; }
#new-watch-form #watch-add-wrapper-zone > div {
display: inline-block; }
@media only screen and (max-width: 760px) {
#new-watch-form #watch-add-wrapper-zone #url {
width: 100%; } }
#diff-col {
padding-left: 40px; }
@@ -277,15 +268,11 @@ footer {
#new-version-text a {
color: #e07171; }
.watch-controls {
/* default */ }
.watch-controls .state-on img {
opacity: 0.8; }
.watch-controls img {
opacity: 0.2; }
.watch-controls img:hover {
transition: opacity 0.3s;
opacity: 0.8; }
.paused-state.state-False img {
opacity: 0.2; }
.paused-state.state-False:hover img {
opacity: 0.8; }
.monospaced-textarea textarea {
width: 100%;
@@ -366,8 +353,6 @@ and also iPads specifically.
/* Hide table headers (but not display: none;, for accessibility) */ }
.watch-table thead, .watch-table tbody, .watch-table th, .watch-table td, .watch-table tr {
display: block; }
.watch-table .last-checked > span {
vertical-align: middle; }
.watch-table .last-checked::before {
color: #555;
content: "Last Checked "; }
@@ -385,8 +370,7 @@ and also iPads specifically.
.watch-table td {
/* Behave like a "row" */
border: none;
border-bottom: 1px solid #eee;
vertical-align: middle; }
border-bottom: 1px solid #eee; }
.watch-table td:before {
/* Top/left values mimic padding */
top: 6px;
@@ -506,75 +490,3 @@ ul {
#api-key-copy {
color: #0078e7; }
/* spinner */
.loader,
.loader:after {
border-radius: 50%;
width: 10px;
height: 10px; }
.loader {
margin: 0px auto;
font-size: 3px;
vertical-align: middle;
display: inline-block;
text-indent: -9999em;
border-top: 1.1em solid rgba(38, 104, 237, 0.2);
border-right: 1.1em solid rgba(38, 104, 237, 0.2);
border-bottom: 1.1em solid rgba(38, 104, 237, 0.2);
border-left: 1.1em solid #2668ed;
-webkit-transform: translateZ(0);
-ms-transform: translateZ(0);
transform: translateZ(0);
-webkit-animation: load8 1.1s infinite linear;
animation: load8 1.1s infinite linear; }
@-webkit-keyframes load8 {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg); }
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg); } }
@keyframes load8 {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg); }
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg); } }
.snapshot-age {
padding: 4px;
background-color: #dfdfdf;
border-radius: 3px;
font-weight: bold;
margin-bottom: 4px; }
.snapshot-age.error {
background-color: #ff0000;
color: #fff; }
#checkbox-operations {
background: rgba(0, 0, 0, 0.05);
padding: 1em;
border-radius: 10px;
margin-bottom: 1em;
display: none; }
.checkbox-uuid > * {
vertical-align: middle; }
.inline-warning {
border: 1px solid #ff3300;
padding: 0.5rem;
border-radius: 5px;
color: #ff3300; }
.inline-warning > span {
display: inline-block;
vertical-align: middle; }
.inline-warning img.inline-warning-icon {
display: inline;
height: 26px;
vertical-align: middle; }

View File

@@ -1,11 +1,11 @@
/*
* -- BASE STYLES --
* Most of these are inherited from Base, but I want to change a few.
* nvm use v14.18.1 && npm install && npm run build
* nvm use v14.18.1
* npm install
* npm run build
* or npm run watch
*/
@import "parts/_arrows.scss";
body {
color: #333;
background: #262626;
@@ -70,17 +70,6 @@ code {
th {
white-space: nowrap;
a {
font-weight: normal;
&.active {
font-weight: bolder;
}
&.inactive {
.arrow {
display: none;
}
}
}
}
.title-col a[target="_blank"]::after, .current-diff-url::after {
@@ -150,6 +139,29 @@ body:after, body:before {
clip-path: polygon(100% 0, 0 0, 0 77.5%, 1% 77.4%, 2% 77.1%, 3% 76.6%, 4% 75.9%, 5% 75.05%, 6% 74.05%, 7% 72.95%, 8% 71.75%, 9% 70.55%, 10% 69.3%, 11% 68.05%, 12% 66.9%, 13% 65.8%, 14% 64.8%, 15% 64%, 16% 63.35%, 17% 62.85%, 18% 62.6%, 19% 62.5%, 20% 62.65%, 21% 63%, 22% 63.5%, 23% 64.2%, 24% 65.1%, 25% 66.1%, 26% 67.2%, 27% 68.4%, 28% 69.65%, 29% 70.9%, 30% 72.15%, 31% 73.3%, 32% 74.35%, 33% 75.3%, 34% 76.1%, 35% 76.75%, 36% 77.2%, 37% 77.45%, 38% 77.5%, 39% 77.3%, 40% 76.95%, 41% 76.4%, 42% 75.65%, 43% 74.75%, 44% 73.75%, 45% 72.6%, 46% 71.4%, 47% 70.15%, 48% 68.9%, 49% 67.7%, 50% 66.55%, 51% 65.5%, 52% 64.55%, 53% 63.75%, 54% 63.15%, 55% 62.75%, 56% 62.55%, 57% 62.5%, 58% 62.7%, 59% 63.1%, 60% 63.7%, 61% 64.45%, 62% 65.4%, 63% 66.45%, 64% 67.6%, 65% 68.8%, 66% 70.05%, 67% 71.3%, 68% 72.5%, 69% 73.6%, 70% 74.65%, 71% 75.55%, 72% 76.35%, 73% 76.9%, 74% 77.3%, 75% 77.5%, 76% 77.45%, 77% 77.25%, 78% 76.8%, 79% 76.2%, 80% 75.4%, 81% 74.45%, 82% 73.4%, 83% 72.25%, 84% 71.05%, 85% 69.8%, 86% 68.55%, 87% 67.35%, 88% 66.2%, 89% 65.2%, 90% 64.3%, 91% 63.55%, 92% 63%, 93% 62.65%, 94% 62.5%, 95% 62.55%, 96% 62.8%, 97% 63.3%, 98% 63.9%, 99% 64.75%, 100% 65.7%)
}
.arrow {
border: solid black;
border-width: 0 3px 3px 0;
display: inline-block;
padding: 3px;
&.right {
transform: rotate(-45deg);
-webkit-transform: rotate(-45deg);
}
&.left {
transform: rotate(135deg);
-webkit-transform: rotate(135deg);
}
&.up {
transform: rotate(-135deg);
-webkit-transform: rotate(-135deg);
}
&.down {
transform: rotate(45deg);
-webkit-transform: rotate(45deg);
}
}
.button-small {
font-size: 85%;
}
@@ -257,8 +269,8 @@ body:after, body:before {
border-radius: 10px;
margin-bottom: 1em;
input {
width: auto !important;
display: inline-block;
margin-bottom: 5px;
}
.label {
display: none;
@@ -267,17 +279,6 @@ body:after, body:before {
color: #fff;
font-weight: bold;
}
#watch-add-wrapper-zone {
> div {
display: inline-block;
}
@media only screen and (max-width: 760px) {
#url {
width: 100%;
}
}
}
}
@@ -352,25 +353,14 @@ footer {
color: #e07171;
}
.watch-controls {
.state-on {
img {
opacity: 0.8;
}
}
/* default */
img {
.paused-state {
&.state-False img {
opacity: 0.2;
}
img {
&:hover {
transition: opacity 0.3s;
opacity: 0.8;
}
&.state-False:hover img {
opacity: 0.8;
}
}
.monospaced-textarea {
@@ -497,12 +487,6 @@ and also iPads specifically.
display: block;
}
.last-checked {
> span {
vertical-align: middle;
}
}
.last-checked::before {
color: #555;
content: "Last Checked ";
@@ -533,7 +517,7 @@ and also iPads specifically.
/* Behave like a "row" */
border: none;
border-bottom: 1px solid #eee;
vertical-align: middle;
&:before {
/* Top/left values mimic padding */
top: 6px;
@@ -717,90 +701,3 @@ ul {
#api-key-copy {
color: #0078e7;
}
/* spinner */
.loader,
.loader:after {
border-radius: 50%;
width: 10px;
height: 10px;
}
.loader {
margin: 0px auto;
font-size: 3px;
vertical-align: middle;
display: inline-block;
text-indent: -9999em;
border-top: 1.1em solid rgba(38,104,237, 0.2);
border-right: 1.1em solid rgba(38,104,237, 0.2);
border-bottom: 1.1em solid rgba(38,104,237, 0.2);
border-left: 1.1em solid #2668ed;
-webkit-transform: translateZ(0);
-ms-transform: translateZ(0);
transform: translateZ(0);
-webkit-animation: load8 1.1s infinite linear;
animation: load8 1.1s infinite linear;
}
@-webkit-keyframes load8 {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg);
}
}
@keyframes load8 {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg);
}
}
.snapshot-age {
padding: 4px;
background-color: #dfdfdf;
border-radius: 3px;
font-weight: bold;
margin-bottom: 4px;
&.error {
background-color: #ff0000;
color: #fff;
}
}
#checkbox-operations {
background: rgba(0, 0, 0, 0.05);
padding: 1em;
border-radius: 10px;
margin-bottom: 1em;
display: none;
}
.checkbox-uuid {
> * {
vertical-align: middle;
}
}
.inline-warning {
> span {
display: inline-block;
vertical-align: middle;
}
img.inline-warning-icon {
display: inline;
height: 26px;
vertical-align: middle;
}
border: 1px solid #ff3300;
padding: 0.5rem;
border-radius: 5px;
color: #ff3300;
}

View File

@@ -8,7 +8,7 @@ import threading
import time
import uuid as uuid_builder
from copy import deepcopy
from os import path, unlink
from os import mkdir, path, unlink
from threading import Lock
import re
import requests
@@ -30,14 +30,14 @@ class ChangeDetectionStore:
def __init__(self, datastore_path="/datastore", include_default_watches=True, version_tag="0.0.0"):
# Should only be active for docker
# logging.basicConfig(filename='/dev/stdout', level=logging.INFO)
self.__data = App.model()
self.needs_write = False
self.datastore_path = datastore_path
self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
self.needs_write = False
self.proxy_list = None
self.start_time = time.time()
self.stop_thread = False
self.__data = App.model()
# Base definition for all watchers
# deepcopy part of #569 - not sure why its needed exactly
self.generic_definition = deepcopy(Watch.model(datastore_path = datastore_path, default={}))
@@ -81,6 +81,8 @@ class ChangeDetectionStore:
except (FileNotFoundError, json.decoder.JSONDecodeError):
if include_default_watches:
print("Creating JSON store at", self.datastore_path)
self.add_watch(url='http://www.quotationspage.com/random.php', tag='test')
self.add_watch(url='https://news.ycombinator.com/', tag='Tech news')
self.add_watch(url='https://changedetection.io/CHANGELOG.txt', tag='changedetection.io')
@@ -111,7 +113,9 @@ class ChangeDetectionStore:
self.__data['settings']['application']['api_access_token'] = secret
# Proxy list support - available as a selection in settings when text file is imported
proxy_list_file = "{}/proxies.json".format(self.datastore_path)
# CSV list
# "name, address", or just "name"
proxy_list_file = "{}/proxies.txt".format(self.datastore_path)
if path.isfile(proxy_list_file):
self.import_proxy_list(proxy_list_file)
@@ -154,11 +158,13 @@ class ChangeDetectionStore:
@property
def threshold_seconds(self):
seconds = 0
for m, n in Watch.mtable.items():
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
for m, n in mtable.items():
x = self.__data['settings']['requests']['time_between_check'].get(m)
if x:
seconds += x * n
return seconds
return max(seconds, minimum_seconds_recheck_time)
@property
def has_unviewed(self):
@@ -240,12 +246,17 @@ class ChangeDetectionStore:
return False
def get_val(self, uuid, val):
# Probably their should be dict...
return self.data['watching'][uuid].get(val)
# Remove a watchs data but keep the entry (URL etc)
def clear_watch_history(self, uuid):
def scrub_watch(self, uuid):
import pathlib
self.__data['watching'][uuid].update(
{'last_checked': 0,
'last_changed': 0,
'last_viewed': 0,
'previous_md5': False,
'last_notification_error': False,
@@ -280,16 +291,14 @@ class ChangeDetectionStore:
headers={'App-Guid': self.__data['app_guid']})
res = r.json()
# List of permissible attributes we accept from the wild internet
# List of permisable stuff we accept from the wild internet
for k in ['url', 'tag',
'paused', 'title',
'previous_md5', 'headers',
'body', 'method',
'ignore_text', 'css_filter',
'subtractive_selectors', 'trigger_text',
'extract_title_as_title', 'extract_text',
'text_should_not_be_present',
'webdriver_js_execute_code']:
'paused', 'title',
'previous_md5', 'headers',
'body', 'method',
'ignore_text', 'css_filter',
'subtractive_selectors', 'trigger_text',
'extract_title_as_title', 'extract_text']:
if res.get(k):
apply_extras[k] = res[k]
@@ -316,12 +325,25 @@ class ChangeDetectionStore:
new_watch.update(apply_extras)
self.__data['watching'][new_uuid]=new_watch
self.__data['watching'][new_uuid].ensure_data_dir_exists()
# Get the directory ready
output_path = "{}/{}".format(self.datastore_path, new_uuid)
try:
mkdir(output_path)
except FileExistsError:
print(output_path, "already exists.")
if write_to_disk_now:
self.sync_to_json()
return new_uuid
def get_screenshot(self, watch_uuid):
output_path = "{}/{}".format(self.datastore_path, watch_uuid)
fname = "{}/last-screenshot.png".format(output_path)
if path.isfile(fname):
return fname
return False
def visualselector_data_is_ready(self, watch_uuid):
output_path = "{}/{}".format(self.datastore_path, watch_uuid)
screenshot_filename = "{}/last-screenshot.png".format(output_path)
@@ -332,38 +354,17 @@ class ChangeDetectionStore:
return False
# Save as PNG, PNG is larger but better for doing visual diff in the future
def save_screenshot(self, watch_uuid, screenshot: bytes, as_error=False):
if not self.data['watching'].get(watch_uuid):
return
if as_error:
target_path = os.path.join(self.datastore_path, watch_uuid, "last-error-screenshot.png")
else:
target_path = os.path.join(self.datastore_path, watch_uuid, "last-screenshot.png")
self.data['watching'][watch_uuid].ensure_data_dir_exists()
with open(target_path, 'wb') as f:
def save_screenshot(self, watch_uuid, screenshot: bytes):
output_path = "{}/{}".format(self.datastore_path, watch_uuid)
fname = "{}/last-screenshot.png".format(output_path)
with open(fname, 'wb') as f:
f.write(screenshot)
f.close()
def save_error_text(self, watch_uuid, contents):
if not self.data['watching'].get(watch_uuid):
return
target_path = os.path.join(self.datastore_path, watch_uuid, "last-error.txt")
with open(target_path, 'w') as f:
f.write(contents)
def save_xpath_data(self, watch_uuid, data, as_error=False):
if not self.data['watching'].get(watch_uuid):
return
if as_error:
target_path = os.path.join(self.datastore_path, watch_uuid, "elements-error.json")
else:
target_path = os.path.join(self.datastore_path, watch_uuid, "elements.json")
with open(target_path, 'w') as f:
def save_xpath_data(self, watch_uuid, data):
output_path = "{}/{}".format(self.datastore_path, watch_uuid)
fname = "{}/elements.json".format(output_path)
with open(fname, 'w') as f:
f.write(json.dumps(data))
f.close()
@@ -433,42 +434,20 @@ class ChangeDetectionStore:
unlink(item)
def import_proxy_list(self, filename):
with open(filename) as f:
self.proxy_list = json.load(f)
print ("Registered proxy list", list(self.proxy_list.keys()))
import csv
with open(filename, newline='') as f:
reader = csv.reader(f, skipinitialspace=True)
# @todo This loop can could be improved
l = []
for row in reader:
if len(row):
if len(row)>=2:
l.append(tuple(row[:2]))
else:
l.append(tuple([row[0], row[0]]))
self.proxy_list = l if len(l) else None
def get_preferred_proxy_for_watch(self, uuid):
"""
Returns the preferred proxy by ID key
:param uuid: UUID
:return: proxy "key" id
"""
proxy_id = None
if self.proxy_list is None:
return None
# If its a valid one
watch = self.data['watching'].get(uuid)
if watch.get('proxy') and watch.get('proxy') in list(self.proxy_list.keys()):
return watch.get('proxy')
# not valid (including None), try the system one
else:
system_proxy_id = self.data['settings']['requests'].get('proxy')
# Is not None and exists
if self.proxy_list.get(system_proxy_id):
return system_proxy_id
# Fallback - Did not resolve anything, use the first available
if system_proxy_id is None:
first_default = list(self.proxy_list)[0]
return first_default
return None
# Run all updates
# IMPORTANT - Each update could be run even when they have a new install and the schema is correct
# So therefor - each `update_n` should be very careful about checking if it needs to actually run
@@ -539,47 +518,3 @@ class ChangeDetectionStore:
# But we should set it back to a empty dict so we don't break if this schema runs on an earlier version.
# In the distant future we can remove this entirely
self.data['watching'][uuid]['history'] = {}
# We incorrectly stored last_changed when there was not a change, and then confused the output list table
def update_3(self):
# see https://github.com/dgtlmoon/changedetection.io/pull/835
return
# `last_changed` not needed, we pull that information from the history.txt index
def update_4(self):
for uuid, watch in self.data['watching'].items():
try:
# Remove it from the struct
del(watch['last_changed'])
except:
continue
return
def update_5(self):
# If the watch notification body, title look the same as the global one, unset it, so the watch defaults back to using the main settings
# In other words - the watch notification_title and notification_body are not needed if they are the same as the default one
current_system_body = self.data['settings']['application']['notification_body'].translate(str.maketrans('', '', "\r\n "))
current_system_title = self.data['settings']['application']['notification_body'].translate(str.maketrans('', '', "\r\n "))
for uuid, watch in self.data['watching'].items():
try:
watch_body = watch.get('notification_body', '')
if watch_body and watch_body.translate(str.maketrans('', '', "\r\n ")) == current_system_body:
# Looks the same as the default one, so unset it
watch['notification_body'] = None
watch_title = watch.get('notification_title', '')
if watch_title and watch_title.translate(str.maketrans('', '', "\r\n ")) == current_system_title:
# Looks the same as the default one, so unset it
watch['notification_title'] = None
except Exception as e:
continue
return
# We incorrectly used common header overrides that should only apply to Requests
# These are now handled in content_fetcher::html_requests and shouldnt be passed to Playwright/Selenium
def update_7(self):
# These were hard-coded in early versions
for v in ['User-Agent', 'Accept', 'Accept-Encoding', 'Accept-Language']:
if self.data['settings']['headers'].get(v):
del self.data['settings']['headers'][v]
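
Note on the proxy-list change above: one side of this diff reads the proxy list from a CSV-style proxies.txt ("name, address", or just "name") instead of a proxies.json file. As a rough, standalone sketch of that parsing idea only (the function name and example values below are illustrative, not taken from the repository):

import csv

def load_proxy_list(filename):
    # Each non-empty row becomes a (label, address) tuple; a single-column row
    # reuses its value for both, mirroring the import_proxy_list() hunk above.
    entries = []
    with open(filename, newline='') as f:
        for row in csv.reader(f, skipinitialspace=True):
            if not row:
                continue
            entries.append(tuple(row[:2]) if len(row) >= 2 else (row[0], row[0]))
    return entries if entries else None

# Example proxies.txt contents (hypothetical values):
#   squid-local, http://127.0.0.1:3128
#   socks-example, socks5://user:pass@host:1080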

View File

@@ -1,21 +1,20 @@
{% from '_helpers.jinja' import render_field %}
{% macro render_common_settings_form(form, emailprefix, settings_application) %}
{% macro render_common_settings_form(form, current_base_url, emailprefix) %}
<div class="pure-control-group">
{{ render_field(form.notification_urls, rows=5, placeholder="Examples:
Gitter - gitter://token/room
Office365 - o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail
AWS SNS - sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo
SMTPS - mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com",
class="notification-urls" )
SMTPS - mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com", class="notification-urls")
}}
<div class="pure-form-message-inline">
<ul>
<li>Use <a target=_new href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.</li>
<li><code>discord://</code> only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
<li><code>tgram://</code> bots cant send messages to other bots, so you should specify chat ID of non-bot user.</li>
<li><code>tgram://</code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li>
</ul>
</div>
<br/>
@@ -27,16 +26,15 @@
</div>
<div id="notification-customisation" class="pure-control-group">
<div class="pure-control-group">
{{ render_field(form.notification_title, class="m-d notification-title", placeholder=settings_application['notification_title']) }}
{{ render_field(form.notification_title, class="m-d notification-title") }}
<span class="pure-form-message-inline">Title for all notifications</span>
</div>
<div class="pure-control-group">
{{ render_field(form.notification_body , rows=5, class="notification-body", placeholder=settings_application['notification_body']) }}
{{ render_field(form.notification_body , rows=5, class="notification-body") }}
<span class="pure-form-message-inline">Body for all notifications</span>
</div>
<div class="pure-control-group">
<!-- unsure -->
{{ render_field(form.notification_format , class="notification-format") }}
{{ render_field(form.notification_format , rows=5, class="notification-format") }}
<span class="pure-form-message-inline">Format for all notifications</span>
</div>
<div class="pure-controls">
@@ -96,7 +94,7 @@
</table>
<br/>
URLs generated by changedetection.io (such as <code>{diff_url}</code>) require the <code>BASE_URL</code> environment variable set.<br/>
Your <code>BASE_URL</code> var is currently "{{settings_application['current_base_url']}}"
Your <code>BASE_URL</code> var is currently "{{current_base_url}}"
</span>
</div>
</div>
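
Note on the notification URL placeholders above: the template help text points to Apprise URLs. A minimal sketch of how such a URL is typically exercised with the apprise library (the URL and credentials here are placeholders copied from the form's own example, not real values):

import apprise

a = apprise.Apprise()
# Any of the URL styles listed in the placeholder text (gitter://, o365://, sns://, mailtos:// ...) can be added here
a.add("mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com")
a.notify(title="Change detected", body="The watched page has changed.")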

View File

@@ -1,7 +0,0 @@
{% macro pagination(sorted_watches, total_per_page, current_page) %}
{{ sorted_watches|length }}
{% for row in sorted_watches|batch(total_per_page, '&nbsp;') %}
{{ loop.index}}
{% endfor %}
{% endmacro %}

View File

@@ -3,9 +3,6 @@
{% block content %}
<script>
const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
{% if last_error_screenshot %}
const error_screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid, error_screenshot=1) }}";
{% endif %}
</script>
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script>
@@ -25,7 +22,7 @@
{% if versions|length >= 1 %}
<label for="diff-version">Compare newest (<span id="current-v-date"></span>) with</label>
<select id="diff-version" name="previous_version">
{% for version in versions|reverse %}
{% for version in versions %}
<option value="{{version}}" {% if version== current_previous_version %} selected="" {% endif %}>
{{version}}
</option>
@@ -46,31 +43,15 @@
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
<div class="tabs">
<ul>
{% if last_error_text %}<li class="tab" id="error-text-tab"><a href="#error-text">Error Text</a></li> {% endif %}
{% if last_error_screenshot %}<li class="tab" id="error-screenshot-tab"><a href="#error-screenshot">Error Screenshot</a></li> {% endif %}
<li class="tab" id=""><a href="#text">Text</a></li>
<li class="tab" id="default-tab"><a href="#text">Text</a></li>
<li class="tab" id="screenshot-tab"><a href="#screenshot">Screenshot</a></li>
</ul>
</div>
<div id="diff-ui">
<div class="tab-pane-inner" id="error-text">
<div class="snapshot-age error">{{watch_a.error_text_ctime|format_seconds_ago}} seconds ago</div>
<pre>
{{ last_error_text }}
</pre>
</div>
<div class="tab-pane-inner" id="error-screenshot">
<div class="snapshot-age error">{{watch_a.snapshot_error_screenshot_ctime|format_seconds_ago}} seconds ago</div>
<img id="error-screenshot-img" style="max-width: 80%" alt="Current error-ing screenshot from most recent request"/>
</div>
<div class="tab-pane-inner" id="text">
<div class="tip">Pro-tip: Use <strong>show current snapshot</strong> tab to visualise what will be ignored.
</div>
<div class="snapshot-age">{{watch_a.snapshot_text_ctime|format_timestamp_timeago}}</div>
<table>
<tbody>
<tr>
@@ -89,10 +70,10 @@
<div class="tip">
For now, Differences are performed on text, not graphically, only the latest screenshot is available.
</div>
</br>
{% if is_html_webdriver %}
{% if screenshot %}
<div class="snapshot-age">{{watch_a.snapshot_screenshot_ctime|format_timestamp_timeago}}</div>
<img style="max-width: 80%" id="screenshot-img" alt="Current screenshot from most recent request"/>
<img style="max-width: 80%" id="screenshot-img" alt="Current screenshot from most recent request"/>
{% else %}
No screenshot available just yet! Try rechecking the page.
{% endif %}
@@ -107,6 +88,7 @@
<script defer="">
var a = document.getElementById('a');
var b = document.getElementById('b');
var result = document.getElementById('result');

View File

@@ -7,7 +7,6 @@
const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}";
const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};
{% if emailprefix %}
const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
@@ -23,9 +22,9 @@
<div class="tabs collapsable">
<ul>
<li class="tab" id=""><a href="#general">General</a></li>
<li class="tab" id="default-tab"><a href="#general">General</a></li>
<li class="tab"><a href="#request">Request</a></li>
<li class="tab"><a id="visualselector-tab" href="#visualselector">Visual Filter Selector</a></li>
<li class="tab"><a id="visualselector-tab" href="#visualselector">Visual Selector</a></li>
<li class="tab"><a href="#filters-and-triggers">Filters &amp; Triggers</a></li>
<li class="tab"><a href="#notifications">Notifications</a></li>
</ul>
@@ -33,7 +32,7 @@
<div class="box-wrap inner">
<form class="pure-form pure-form-stacked"
action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next'), unpause_on_save = request.args.get('unpause_on_save')) }}" method="POST">
action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next') ) }}" method="POST">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
<div class="tab-pane-inner" id="general">
@@ -62,12 +61,6 @@
<div class="pure-control-group">
{{ render_checkbox_field(form.extract_title_as_title) }}
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.filter_failure_notification_send) }}
<span class="pure-form-message-inline">
Sends a notification when the filter can no longer be seen on the page, good for knowing when the page changed and your filter will not work anymore.
</span>
</div>
</fieldset>
</div>
@@ -77,7 +70,6 @@
<span class="pure-form-message-inline">
<p>Use the <strong>Basic</strong> method (default) where your watched site doesn't need Javascript to render.</p>
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'. </p>
Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using BrightData Proxies, find out more here.</a>
</span>
</div>
{% if form.proxy %}
@@ -88,43 +80,34 @@
</span>
</div>
{% endif %}
<div class="pure-control-group inline-radio">
{{ render_checkbox_field(form.ignore_status_codes) }}
</div>
<fieldset id="webdriver-override-options">
<div class="pure-form-message-inline">
<strong>If you're having trouble waiting for the page to be fully rendered (text missing etc), try increasing the 'wait' time here.</strong>
<br/>
This will wait <i>n</i> seconds before extracting the text.
</div>
<div class="pure-control-group">
{{ render_field(form.webdriver_delay) }}
<div class="pure-form-message-inline">
<strong>If you're having trouble waiting for the page to be fully rendered (text missing etc), try increasing the 'wait' time here.</strong>
<br/>
This will wait <i>n</i> seconds before extracting the text.
{% if using_global_webdriver_wait %}
<br/><strong>Using the current global default settings</strong>
{% endif %}
</div>
</div>
<div class="pure-control-group">
{{ render_field(form.webdriver_js_execute_code) }}
<div class="pure-form-message-inline">
Run this code before performing change detection, handy for filling in fields and other actions <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Run-JavaScript-before-change-detection">More help and examples here</a>
</div>
{% if using_global_webdriver_wait %}
<div class="pure-form-message-inline">
<strong>Using the current global default settings</strong>
</div>
{% endif %}
</fieldset>
<fieldset class="pure-group" id="requests-override-options">
{% if not playwright_enabled %}
<div class="pure-form-message-inline">
<strong>Request override is currently only used by the <i>Basic fast Plaintext/HTTP Client</i> method.</strong>
</div>
{% endif %}
<div class="pure-control-group" id="request-method">
<div class="pure-form-message-inline">
<strong>Request override is currently only used by the <i>Basic fast Plaintext/HTTP Client</i> method.</strong>
</div>
<div class="pure-control-group">
{{ render_field(form.method) }}
</div>
<div class="pure-control-group" id="request-headers">
<div class="pure-control-group">
{{ render_field(form.headers, rows=5, placeholder="Example
Cookie: foobar
User-Agent: wonderbra 1.0") }}
</div>
<div class="pure-control-group" id="request-body">
<div class="pure-control-group">
{{ render_field(form.body, rows=5, placeholder="Example
{
\"name\":\"John\",
@@ -132,24 +115,18 @@ User-Agent: wonderbra 1.0") }}
\"car\":null
}") }}
</div>
<div>
{{ render_checkbox_field(form.ignore_status_codes) }}
</div>
</fieldset>
<br/>
</div>
<div class="tab-pane-inner" id="notifications">
<strong>Note: <i>These settings override the global settings for this watch.</i></strong>
<fieldset>
<div class="pure-control-group inline-radio">
{{ render_checkbox_field(form.notification_muted) }}
</div>
<div class="field-group" id="notification-field-group">
{% if has_default_notification_urls %}
<div class="inline-warning">
<img class="inline-warning-icon" src="{{url_for('static_content', group='images', filename='notice.svg')}}" alt="Look out!" title="Lookout!"/>
There are <a href="{{ url_for('settings_page')}}#notifications">system-wide notification URLs enabled</a>, this form will override notification settings for this watch only &dash; an empty Notification URL list here will still send notifications.
</div>
{% endif %}
<a href="#notifications" id="notification-setting-reset-to-default" class="pure-button button-xsmall" style="right: 20px; top: 20px; position: absolute; background-color: #5f42dd; border-radius: 4px; font-size: 70%; color: #fff">Use system defaults</a>
{{ render_common_settings_form(form, emailprefix, settings_application) }}
<div class="field-group">
{{ render_common_settings_form(form, current_base_url, emailprefix) }}
</div>
</fieldset>
</div>
@@ -166,43 +143,18 @@ User-Agent: wonderbra 1.0") }}
</li>
</ul>
</div>
<fieldset>
<div class="pure-control-group">
{{ render_checkbox_field(form.check_unique_lines) }}
<span class="pure-form-message-inline">Good for websites that just move the content around, and you want to know when NEW content is added, compares new lines against all history for this watch.</span>
</div>
</fieldset>
<div class="pure-control-group">
{% set field = render_field(form.css_filter,
placeholder=".class-name or #some-id, or other CSS selector rule.",
class="m-d")
%}
{{ field }}
{% if '/text()' in field %}
<span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the &lt;element&gt; contains &lt;![CDATA[]]&gt;</strong></span><br/>
{% endif %}
{{ render_field(form.css_filter, placeholder=".class-name or #some-id, or other CSS selector rule.",
class="m-d") }}
<span class="pure-form-message-inline">
<ul>
<li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
<li>JSON - Limit text to this JSON rule, using either <a href="https://pypi.org/project/jsonpath-ng/" target="new">JSONPath</a> or <a href="https://stedolan.github.io/jq/" target="new">jq</a> (if installed).
<ul>
<li>JSONPath: Prefix with <code>json:</code>, use <code>json:$</code> to force re-formatting if required, <a href="https://jsonpath.com/" target="new">test your JSONPath here</a>.</li>
{% if jq_support %}
<li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>.</li>
{% else %}
<li>jq support not installed</li>
{% endif %}
</ul>
</li>
<li>XPath - Limit text to this XPath rule, simply start with a forward-slash,
<ul>
<li>Example: <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
<li>JSON - Limit text to this JSON rule, using <a href="https://pypi.org/project/jsonpath-ng/">JSONPath</a>, prefix with <code>"json:"</code>, use <code>json:$</code> to force re-formatting if required, <a
href="https://jsonpath.com/" target="new">test your JSONPath here</a></li>
<li>XPath - Limit text to this XPath rule, simply start with a forward-slash, example <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
href="http://xpather.com/" target="new">test your XPath here</a></li>
<li>Example: Get all titles from an RSS feed <code>//title/text()</code></li>
</ul>
</li>
</ul>
Please be sure that you thoroughly understand how to write CSS, JSONPath, XPath{% if jq_support %}, or jq selector{%endif%} rules before filing an issue on GitHub! <a
Please be sure that you thoroughly understand how to write CSS or JSONPath, XPath selector rules before filing an issue on GitHub! <a
href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br/>
</span>
</div>
@@ -225,7 +177,7 @@ nav
<span class="pure-form-message-inline">
<ul>
<li>Each line processed separately, any line matching will be ignored (removed before creating the checksum)</li>
<li>Regular Expression support, wrap the entire line in forward slash <code>/regex/</code></li>
<li>Regular Expression support, wrap the line in forward slash <code>/regex/</code></li>
<li>Changing this will affect the comparison checksum which may trigger an alert</li>
<li>Use the preview/show current tab to see ignores</li>
</ul>
@@ -247,36 +199,13 @@ nav
</span>
</div>
</fieldset>
<fieldset>
<div class="pure-control-group">
{{ render_field(form.text_should_not_be_present, rows=5, placeholder="For example: Out of stock
Sold out
Not in stock
Unavailable") }}
<span class="pure-form-message-inline">
<ul>
<li>Block change-detection while this text is on the page, all text and regex are tested <i>case-insensitive</i>, good for waiting for when a product is available again</li>
<li>Block text is processed from the result-text that comes out of any CSS/JSON Filters for this watch</li>
<li>All lines here must not exist (think of each line as "OR")</li>
<li>Note: Wrap in forward slash / to use regex example: <code>/foo\d/</code></li>
</ul>
</span>
</div>
</fieldset>
<fieldset>
<div class="pure-control-group">
{{ render_field(form.extract_text, rows=5, placeholder="\d+ online") }}
<span class="pure-form-message-inline">
<ul>
<li>Extracts text in the final output (line by line) after other filters using regular expressions;
<ul>
<li>Regular expression &dash; example <code>/reports.+?2022/i</code></li>
<li>Use <code>//(?aiLmsux))</code> type flags (more <a href="https://docs.python.org/3/library/re.html#index-15">information here</a>)<br/></li>
<li>Keyword example &dash; example <code>Out of stock</code></li>
<li>Use groups to extract just that text &dash; example <code>/reports.+?(\d+)/i</code> returns a list of years only</li>
</ul>
</li>
<li>One line per regular-expression/ string match</li>
<li>Extracts text in the final output after other filters using regular expressions, for example <code>\d+ online</code></li>
<li>One line per regular-expression.</li>
</ul>
</span>
</div>
@@ -285,7 +214,7 @@ Unavailable") }}
<div class="tab-pane-inner visual-selector-ui" id="visualselector">
<img id="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}">
<strong>Pro-tip:</strong> This tool is only for limiting which elements will be included on a change-detection, not for interacting with browser directly.
<fieldset>
<div class="pure-control-group">
{% if visualselector_enabled %}
@@ -325,11 +254,13 @@ Unavailable") }}
<div id="actions">
<div class="pure-control-group">
{{ render_button(form.save_button) }}
{{ render_button(form.save_button) }} {{ render_button(form.save_and_preview_button) }}
<a href="{{url_for('form_delete', uuid=uuid)}}"
class="pure-button button-small button-error ">Delete</a>
<a href="{{url_for('clear_watch_history', uuid=uuid)}}"
class="pure-button button-small button-error ">Clear History</a>
<a href="{{url_for('scrub_watch', uuid=uuid)}}"
class="pure-button button-small button-error ">Scrub</a>
<a href="{{url_for('form_clone', uuid=uuid)}}"
class="pure-button button-small ">Create Copy</a>
</div>
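
Note on the filter and extraction fields edited above: several of them share the convention that a line wrapped in forward slashes is treated as a regular expression, optionally with a trailing i flag for case-insensitive matching, while any other line is a plain keyword match. A small illustrative sketch of that convention only (the helper name is hypothetical, not the project's actual parser):

import re

def match_line(rule, text):
    # "/pattern/flags" -> regular expression; anything else -> plain substring match per line
    if rule.startswith("/") and rule.rfind("/") > 0:
        pattern, _, flags = rule[1:].rpartition("/")
        return re.findall(pattern, text, re.IGNORECASE if "i" in flags else 0)
    return [line for line in text.splitlines() if rule in line]

# e.g. match_line(r"/reports.+?(\d+)/i", "Reports for 2022") returns ['2022']
# e.g. match_line("Out of stock", "Currently Out of stock") returns ['Currently Out of stock']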

View File

@@ -5,7 +5,7 @@
<div class="tabs collapsable">
<ul>
<li class="tab" id=""><a href="#url-list">URL List</a></li>
<li class="tab" id="default-tab"><a href="#url-list">URL List</a></li>
<li class="tab"><a href="#distill-io">Distill.io</a></li>
</ul>
</div>

View File

@@ -3,39 +3,23 @@
{% block content %}
<script>
const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
{% if last_error_screenshot %}
const error_screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid, error_screenshot=1) }}";
{% endif %}
</script>
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script>
<div id="settings">
<h1>Current - {{watch.last_checked|format_timestamp_timeago}}</h1>
</div>
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
<div class="tabs">
<ul>
{% if last_error_text %}<li class="tab" id="error-text-tab"><a href="#error-text">Error Text</a></li> {% endif %}
{% if last_error_screenshot %}<li class="tab" id="error-screenshot-tab"><a href="#error-screenshot">Error Screenshot</a></li> {% endif %}
{% if history_n > 0 %}
<li class="tab" id="text-tab"><a href="#text">Text</a></li>
<li class="tab" id="default-tab"><a href="#text">Text</a></li>
<li class="tab" id="screenshot-tab"><a href="#screenshot">Screenshot</a></li>
{% endif %}
</ul>
</div>
<div id="diff-ui">
<div class="tab-pane-inner" id="error-text">
<div class="snapshot-age error">{{watch.error_text_ctime|format_seconds_ago}} seconds ago</div>
<pre>
{{ last_error_text }}
</pre>
</div>
<div class="tab-pane-inner" id="error-screenshot">
<div class="snapshot-age error">{{watch.snapshot_error_screenshot_ctime|format_seconds_ago}} seconds ago</div>
<img id="error-screenshot-img" style="max-width: 80%" alt="Current erroring screenshot from most recent request"/>
</div>
<div class="tab-pane-inner" id="text">
<div class="snapshot-age">{{watch.snapshot_text_ctime|format_timestamp_timeago}}</div>
<span class="ignored">Grey lines are ignored</span> <span class="triggered">Blue lines are triggers</span>
<table>
<tbody>
@@ -49,7 +33,6 @@
</tbody>
</table>
</div>
<div class="tab-pane-inner" id="screenshot">
<div class="tip">
For now, Differences are performed on text, not graphically, only the latest screenshot is available.
@@ -57,7 +40,6 @@
</br>
{% if is_html_webdriver %}
{% if screenshot %}
<div class="snapshot-age">{{watch.snapshot_screenshot_ctime|format_timestamp_timeago}}</div>
<img style="max-width: 80%" id="screenshot-img" alt="Current screenshot from most recent request"/>
{% else %}
No screenshot available just yet! Try rechecking the page.

View File

@@ -3,22 +3,22 @@
{% block content %}
<div class="edit-form">
<div class="box-wrap inner">
<form class="pure-form pure-form-stacked" action="{{url_for('clear_all_history')}}" method="POST">
<form class="pure-form pure-form-stacked" action="{{url_for('scrub_page')}}" method="POST">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
<fieldset>
<div class="pure-control-group">
This will remove version history (snapshots) for ALL watches, but keep your list of URLs! <br/>
This will remove ALL version snapshots/data, but keep your list of URLs. <br/>
You may like to use the <strong>BACKUP</strong> link first.<br/>
</div>
<br/>
<div class="pure-control-group">
<label for="confirmtext">Confirmation text</label>
<input type="text" id="confirmtext" required="" name="confirmtext" value="" size="10"/>
<span class="pure-form-message-inline">Type in the word <strong>clear</strong> to confirm that you understand.</span>
<span class="pure-form-message-inline">Type in the word <strong>scrub</strong> to confirm that you understand!</span>
</div>
<br/>
<div class="pure-control-group">
<button type="submit" class="pure-button pure-button-primary">Clear History!</button>
<button type="submit" class="pure-button pure-button-primary">Scrub!</button>
</div>
<br/>
<div class="pure-control-group">

View File

@@ -16,7 +16,7 @@
<div class="edit-form">
<div class="tabs collapsable">
<ul>
<li class="tab" id=""><a href="#general">General</a></li>
<li class="tab" id="default-tab"><a href="#general">General</a></li>
<li class="tab"><a href="#notifications">Notifications</a></li>
<li class="tab"><a href="#fetching">Fetching</a></li>
<li class="tab"><a href="#filters">Global Filters</a></li>
@@ -32,17 +32,6 @@
{{ render_field(form.requests.form.time_between_check, class="time-check-widget") }}
<span class="pure-form-message-inline">Default time for all watches, when the watch does not have a specific time setting.</span>
</div>
<div class="pure-control-group">
{{ render_field(form.requests.form.jitter_seconds, class="jitter_seconds") }}
<span class="pure-form-message-inline">Example - 3 seconds random jitter could trigger up to 3 seconds earlier or up to 3 seconds later</span>
</div>
<div class="pure-control-group">
{{ render_field(form.application.form.filter_failure_notification_threshold_attempts, class="filter_failure_notification_threshold_attempts") }}
<span class="pure-form-message-inline">After this many consecutive times that the CSS/xPath filter is missing, send a notification
<br/>
Set to <strong>0</strong> to disable
</span>
</div>
<div class="pure-control-group">
{% if not hide_remove_pass %}
{% if current_user.is_authenticated %}
@@ -60,7 +49,7 @@
{{ render_field(form.application.form.base_url, placeholder="http://yoursite.com:5000/",
class="m-d") }}
<span class="pure-form-message-inline">
Base URL used for the <code>{base_url}</code> token in notifications and RSS links.<br/>Default value is the ENV var 'BASE_URL' (Currently "{{settings_application['current_base_url']}}"),
Base URL used for the {base_url} token in notifications and RSS links.<br/>Default value is the ENV var 'BASE_URL' (Currently "{{current_base_url}}"),
<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Configurable-BASE_URL-setting">read more here</a>.
</span>
</div>
@@ -69,6 +58,12 @@
{{ render_checkbox_field(form.application.form.extract_title_as_title) }}
<span class="pure-form-message-inline">Note: This will automatically apply to all existing watches.</span>
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.real_browser_save_screenshot) }}
<span class="pure-form-message-inline">When using a Chrome browser, a screenshot from the last check will be available on the Diff page</span>
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.empty_pages_are_a_change) }}
<span class="pure-form-message-inline">When a page contains HTML, but no renderable text appears (empty page), is this considered a change?</span>
@@ -87,7 +82,7 @@
<div class="tab-pane-inner" id="notifications">
<fieldset>
<div class="field-group">
{{ render_common_settings_form(form.application.form, emailprefix, settings_application) }}
{{ render_common_settings_form(form.application.form, current_base_url, emailprefix) }}
</div>
</fieldset>
</div>
@@ -99,8 +94,6 @@
<p>Use the <strong>Basic</strong> method (default) where your watched sites don't need Javascript to render.</p>
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'. </p>
</span>
<br/>
Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using BrightData Proxies, find out more here.</a>
</div>
<fieldset class="pure-group" id="webdriver-override-options">
<div class="pure-form-message-inline">
@@ -150,7 +143,7 @@ nav
<ul>
<li>Note: This is applied globally in addition to the per-watch rules.</li>
<li>Each line is processed separately; any matching line will be ignored (removed before creating the checksum). See the sketch after this list.</li>
<li>Regular Expression support, wrap the entire line in forward slash <code>/regex/</code></li>
<li>Regular Expression support, wrap the line in forward slash <code>/regex/</code></li>
<li>Changing this will affect the comparison checksum which may trigger an alert</li>
<li>Use the preview/show current tab to see ignores</li>
</ul>
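A short sketch of the ignore-rule convention from the list above, assuming lines wrapped in forward slashes are treated as regular expressions and anything else as a plain substring match:

import re

def line_matches_ignore_rule(line: str, rule: str) -> bool:
    rule = rule.strip()
    if len(rule) > 2 and rule.startswith('/') and rule.endswith('/'):
        # Wrapped in forward slashes: treat the body as a regular expression.
        return re.search(rule[1:-1], line) is not None
    # Plain substring match otherwise (an assumption for this sketch).
    return rule in line

print(line_matches_ignore_rule("Price: $10.99 today", r"/\$\d+\.\d+/"))  # True
print(line_matches_ignore_rule("Sponsored content", "Sponsored"))        # True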
@@ -175,7 +168,7 @@ nav
<div class="pure-control-group">
{{ render_button(form.save_button) }}
<a href="{{url_for('index')}}" class="pure-button button-small button-cancel">Back</a>
<a href="{{url_for('clear_all_history')}}" class="pure-button button-small button-cancel">Clear Snapshot History</a>
<a href="{{url_for('scrub_page')}}" class="pure-button button-small button-cancel">Delete History Snapshot Data</a>
</div>
</div>

View File

@@ -1,40 +1,21 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.jinja' import render_simple_field, render_field %}
{% from '_pagination.jinja' import pagination %}
{% from '_helpers.jinja' import render_simple_field %}
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script>
<div class="box">
<form class="pure-form" action="{{ url_for('form_quick_watch_add') }}" method="POST" id="new-watch-form">
<form class="pure-form" action="{{ url_for('form_watch_add') }}" method="POST" id="new-watch-form">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
<fieldset>
<legend>Add a new change detection watch</legend>
<div id="watch-add-wrapper-zone">
<div>
{{ render_simple_field(form.url, placeholder="https://...", required=true) }}
{{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="watch group") }}
</div>
<div>
{{ render_simple_field(form.watch_submit_button, title="Watch this URL!" ) }}
{{ render_simple_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }}
</div>
</div>
{{ render_simple_field(form.url, placeholder="https://...", required=true) }}
{{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="watch group") }}
<button type="submit" class="pure-button pure-button-primary">Watch</button>
</fieldset>
<span style="color:#eee; font-size: 80%;"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread-white.svg')}}" /> Tip: You can also add 'shared' watches. <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Sharing-a-Watch">More info</a></a></span>
<span style="color:#eee; font-size: 80%;"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" /> Tip: You can also add 'shared' watches. <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Sharing-a-Watch">More info</a></a></span>
</form>
<form class="pure-form" action="{{ url_for('form_watch_list_checkbox_operations') }}" method="POST" id="watch-list-form">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
<div id="checkbox-operations">
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="pause">Pause</button>
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="unpause">UnPause</button>
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="mute">Mute</button>
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="unmute">UnMute</button>
<button class="pure-button button-secondary button-xsmall" style="font-size: 70%" name="op" value="notification-default">Use default notification</button>
<button class="pure-button button-secondary button-xsmall" style="background: #dd4242; font-size: 70%" name="op" value="delete">Delete</button>
</div>
<div>
<a href="{{url_for('index')}}" class="pure-button button-tag {{'active' if not active_tag }}">All</a>
{% for tag in tags %}
@@ -44,32 +25,22 @@
{% endfor %}
</div>
{% set sort_order = request.args.get('order', 'asc') == 'asc' %}
{% set sort_attribute = request.args.get('sort', 'last_changed') %}
{% set pagination_page = request.args.get('page', 0) %}
<div id="watch-table-wrapper">
<table class="pure-table pure-table-striped watch-table">
<thead>
<tr>
<th><input style="vertical-align: middle" type="checkbox" id="check-all"/> #</th>
<th>#</th>
<th></th>
{% set link_order = "desc" if sort_order else "asc" %}
{% set arrow_span = "" %}
<th><a class="{{ 'active '+link_order if sort_attribute == 'label' else 'inactive' }}" href="{{url_for('index', sort='label', order=link_order)}}">Website <span class='arrow {{link_order}}'></span></a></th>
<th><a class="{{ 'active '+link_order if sort_attribute == 'last_checked' else 'inactive' }}" href="{{url_for('index', sort='last_checked', order=link_order)}}">Last Checked <span class='arrow {{link_order}}'></span></a></th>
<th><a class="{{ 'active '+link_order if sort_attribute == 'last_changed' else 'inactive' }}" href="{{url_for('index', sort='last_changed', order=link_order)}}">Last Changed <span class='arrow {{link_order}}'></span></a></th>
<th></th>
<th>Last Checked</th>
<th>Last Changed</th>
<th></th>
</tr>
</thead>
<tbody>
{% set sorted_watches = watches|sort(attribute=sort_attribute, reverse=sort_order) %}
{% for watch in sorted_watches %}
{# WIP for pagination, disabled for now
{% if not ( loop.index >= 3 and loop.index <=4) %}{% continue %}{% endif %} -->
#}
{% for watch in watches %}
<tr id="{{ watch.uuid }}"
class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }}
{% if watch.last_error is defined and watch.last_error != False %}error{% endif %}
@@ -77,15 +48,9 @@
{% if watch.paused is defined and watch.paused != False %}paused{% endif %}
{% if watch.newest_history_key| int > watch.last_viewed and watch.history_n>=2 %}unviewed{% endif %}
{% if watch.uuid in queued_uuids %}queued{% endif %}">
<td class="inline checkbox-uuid" ><input name="uuids" type="checkbox" value="{{ watch.uuid}} "/> <span>{{ loop.index }}</span></td>
<td class="inline watch-controls">
{% if not watch.paused %}
<a class="state-off" href="{{url_for('index', op='pause', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause checks" title="Pause checks"/></a>
{% else %}
<a class="state-on" href="{{url_for('index', op='pause', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='play.svg')}}" alt="UnPause checks" title="UnPause checks"/></a>
{% endif %}
<a class="state-{{'on' if watch.notification_muted}}" href="{{url_for('index', op='mute', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications"/></a>
</td>
<td class="inline">{{ loop.index }}</td>
<td class="inline paused-state state-{{watch.paused}}"><a href="{{url_for('index', pause=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause" title="Pause"/></a></td>
<td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
<a class="external" target="_blank" rel="noopener" href="{{ watch.url.replace('source:','') }}"></a>
<a href="{{url_for('form_share_put_watch', uuid=watch.uuid)}}"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" /></a>
@@ -102,8 +67,8 @@
<span class="watch-tag-list">{{ watch.tag}}</span>
{% endif %}
</td>
<td class="last-checked">{{watch|format_last_checked_time|safe}}</td>
<td class="last-changed">{% if watch.history_n >=2 and watch.last_changed >0 %}
<td class="last-checked">{{watch|format_last_checked_time}}</td>
<td class="last-changed">{% if watch.history_n >=2 and watch.last_changed %}
{{watch.last_changed|format_timestamp_timeago}}
{% else %}
Not yet
@@ -116,7 +81,7 @@
{% if watch.history_n >= 2 %}
<a href="{{ url_for('diff_history_page', uuid=watch.uuid) }}" target="{{watch.uuid}}" class="pure-button button-small pure-button-primary diff-link">Diff</a>
{% else %}
{% if watch.history_n == 1 or (watch.history_n ==0 and watch.error_text_ctime )%}
{% if watch.history_n == 1 %}
<a href="{{ url_for('preview_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button button-small pure-button-primary">Preview</a>
{% endif %}
{% endif %}
@@ -139,11 +104,6 @@
<a href="{{ url_for('rss', tag=active_tag , token=app_rss_token)}}"><img alt="RSS Feed" id="feed-icon" src="{{url_for('static_content', group='images', filename='Generic_Feed-icon.svg')}}" height="15"></a>
</li>
</ul>
{# WIP for pagination, disabled for now
{{ pagination(sorted_watches,3, pagination_page) }}
#}
</div>
</form>
</div>
{% endblock %}

View File

@@ -32,8 +32,6 @@ def app(request):
"""Create application for the tests."""
datastore_path = "./test-datastore"
# So they don't delay in fetching
os.environ["MINIMUM_SECONDS_RECHECK_TIME"] = "0"
try:
os.mkdir(datastore_path)
except FileExistsError:

View File

@@ -2,7 +2,7 @@
import time
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks
from ..util import live_server_setup
import logging
@@ -29,8 +29,14 @@ def test_fetch_webdriver_content(client, live_server):
assert b"1 Imported" in res.data
time.sleep(3)
wait_for_all_checks(client)
attempt = 0
while attempt < 20:
res = client.get(url_for("index"))
if not b'Checking now' in res.data:
break
logging.getLogger().info("Waiting for check to not say 'Checking now'..")
time.sleep(3)
attempt += 1
res = client.get(

View File

@@ -1,2 +0,0 @@
"""Tests for the app."""

View File

@@ -1,14 +0,0 @@
#!/usr/bin/python3
from .. import conftest
#def pytest_addoption(parser):
# parser.addoption("--url_suffix", action="store", default="identifier for request")
#def pytest_generate_tests(metafunc):
# # This is called for every test. Only get/set command line arguments
# # if the argument is specified in the list of test "fixturenames".
# option_value = metafunc.config.option.url_suffix
# if 'url_suffix' in metafunc.fixturenames and option_value is not None:
# metafunc.parametrize("url_suffix", [option_value])

View File

@@ -1,10 +0,0 @@
{
"proxy-one": {
"label": "One",
"url": "http://127.0.0.1:3128"
},
"proxy-two": {
"label": "two",
"url": "http://127.0.0.1:3129"
}
}

View File

@@ -1,41 +0,0 @@
acl localnet src 0.0.0.1-0.255.255.255 # RFC 1122 "this" network (LAN)
acl localnet src 10.0.0.0/8 # RFC 1918 local private network (LAN)
acl localnet src 100.64.0.0/10 # RFC 6598 shared address space (CGN)
acl localnet src 169.254.0.0/16 # RFC 3927 link-local (directly plugged) machines
acl localnet src 172.16.0.0/12 # RFC 1918 local private network (LAN)
acl localnet src 192.168.0.0/16 # RFC 1918 local private network (LAN)
acl localnet src fc00::/7 # RFC 4193 local private network range
acl localnet src fe80::/10 # RFC 4291 link-local (directly plugged) machines
acl localnet src 159.65.224.174
acl SSL_ports port 443
acl Safe_ports port 80 # http
acl Safe_ports port 21 # ftp
acl Safe_ports port 443 # https
acl Safe_ports port 70 # gopher
acl Safe_ports port 210 # wais
acl Safe_ports port 1025-65535 # unregistered ports
acl Safe_ports port 280 # http-mgmt
acl Safe_ports port 488 # gss-http
acl Safe_ports port 591 # filemaker
acl Safe_ports port 777 # multiling http
acl CONNECT method CONNECT
http_access deny !Safe_ports
http_access deny CONNECT !SSL_ports
http_access allow localhost manager
http_access deny manager
http_access allow localhost
http_access allow localnet
http_access deny all
http_port 3128
coredump_dir /var/spool/squid
refresh_pattern ^ftp: 1440 20% 10080
refresh_pattern ^gopher: 1440 0% 1440
refresh_pattern -i (/cgi-bin/|\?) 0 0% 0
refresh_pattern \/(Packages|Sources)(|\.bz2|\.gz|\.xz)$ 0 0% 0 refresh-ims
refresh_pattern \/Release(|\.gpg)$ 0 0% 0 refresh-ims
refresh_pattern \/InRelease$ 0 0% 0 refresh-ims
refresh_pattern \/(Translation-.*)(|\.bz2|\.gz|\.xz)$ 0 0% 0 refresh-ims
refresh_pattern . 0 20% 4320
logfile_rotate 0
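The proxies.json and squid configuration above define two local proxies (ports 3128 and 3129) that the proxy tests route requests through. A quick manual check along the same lines, assuming one of the squid instances is running locally:

import requests

# Plain HTTP so the full URL shows up in squid's access.log
# (HTTPS requests only log the CONNECT tunnel).
proxies = {"http": "http://127.0.0.1:3128", "https": "http://127.0.0.1:3128"}
resp = requests.get("http://one.changedetection.io", proxies=proxies, timeout=10)
print(resp.status_code)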

View File

@@ -1,38 +0,0 @@
#!/usr/bin/python3
import time
from flask import url_for
from ..util import live_server_setup
def test_preferred_proxy(client, live_server):
time.sleep(1)
live_server_setup(live_server)
time.sleep(1)
url = "http://chosen.changedetection.io"
res = client.post(
url_for("import_page"),
# Because a URL won't show in squid/proxy logs due to it being SSL'd
# Use plain HTTP or a specific domain-name here
data={"urls": url},
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(2)
res = client.post(
url_for("edit_page", uuid="first"),
data={
"css_filter": "",
"fetch_backend": "html_requests",
"headers": "",
"proxy": "proxy-two",
"tag": "",
"url": url,
},
follow_redirects=True
)
assert b"Updated watch." in res.data
time.sleep(2)
# Now the request should appear in the second-squid logs

View File

@@ -1,19 +0,0 @@
#!/usr/bin/python3
import time
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
# just make a request, we will grep in the docker logs to see it actually got called
def test_check_basic_change_detection_functionality(client, live_server):
live_server_setup(live_server)
res = client.post(
url_for("import_page"),
# Because a URL won't show in squid/proxy logs due to it being SSL'd
# Use plain HTTP or a specific domain-name here
data={"urls": "http://one.changedetection.io"},
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(3)

View File

@@ -19,6 +19,7 @@ def test_check_access_control(app, client):
)
assert b"Password protection enabled." in res.data
assert b"LOG OUT" not in res.data
# Check we hit the login
res = c.get(url_for("index"), follow_redirects=True)
@@ -37,40 +38,7 @@ def test_check_access_control(app, client):
follow_redirects=True
)
# Yes we are correctly logged in
assert b"LOG OUT" in res.data
# 598 - Password should be set and not accidentally removed
res = c.post(
url_for("settings_page"),
data={
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_requests"},
follow_redirects=True
)
res = c.get(url_for("logout"),
follow_redirects=True)
res = c.get(url_for("settings_page"),
follow_redirects=True)
assert b"Login" in res.data
res = c.get(url_for("login"))
assert b"Login" in res.data
res = c.post(
url_for("login"),
data={"password": "foobar"},
follow_redirects=True
)
# Yes we are correctly logged in
assert b"LOG OUT" in res.data
res = c.get(url_for("settings_page"))
# Menu should be available now

View File

@@ -95,8 +95,6 @@ def test_api_simple(client, live_server):
assert watch_uuid in json.loads(res.data).keys()
before_recheck_info = json.loads(res.data)[watch_uuid]
assert before_recheck_info['last_checked'] != 0
#705 `last_changed` should be zero on the first check
assert before_recheck_info['last_changed'] == 0
assert before_recheck_info['title'] == 'My test URL'
set_modified_response()
@@ -147,16 +145,6 @@ def test_api_simple(client, live_server):
# @todo how to handle None/default global values?
assert watch['history_n'] == 2, "Found replacement history section, which is in its own API"
# basic systeminfo check
res = client.get(
url_for("systeminfo"),
headers={'x-api-key': api_key},
)
info = json.loads(res.data)
assert info.get('watch_count') == 1
assert info.get('uptime') > 0.5
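The systeminfo assertions above exercise the JSON API with an x-api-key header. Queried from outside the test suite it might look like the sketch below; the base URL, endpoint path and key are assumptions for illustration, not taken from this diff:

import requests

API_KEY = "YOUR-API-KEY"        # shown on the settings page of your instance
BASE = "http://localhost:5000"  # adjust to where changedetection.io is running

r = requests.get(BASE + "/api/v1/systeminfo", headers={"x-api-key": API_KEY}, timeout=10)
info = r.json()
print(info.get("watch_count"), info.get("uptime"))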
# Finally delete the watch
res = client.delete(
url_for("watch", uuid=watch_uuid),

View File

@@ -90,14 +90,6 @@ def test_check_basic_change_detection_functionality(client, live_server):
res = client.get(url_for("diff_history_page", uuid="first"))
assert b'Compare newest' in res.data
# Check the [preview] pulls the right one
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True
)
assert b'which has this one new line' in res.data
assert b'Which is across multiple lines' not in res.data
time.sleep(2)
# Do this a few times.. ensures we don't accidentally set the status

View File

@@ -1,137 +0,0 @@
#!/usr/bin/python3
import time
from flask import url_for
from . util import live_server_setup
from changedetectionio import html_tools
def set_original_ignore_response():
test_return_data = """<html>
<body>
Some initial text</br>
<p>Which is across multiple lines</p>
</br>
So let's see what happens. </br>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
def set_modified_original_ignore_response():
test_return_data = """<html>
<body>
Some NEW nice initial text</br>
<p>Which is across multiple lines</p>
</br>
So let's see what happens. </br>
<p>new ignore stuff</p>
<p>out of stock</p>
<p>blah</p>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
# Is the same but includes ZZZZZ, 'ZZZZZ' is the last line in ignore_text
def set_modified_response_minus_block_text():
test_return_data = """<html>
<body>
Some NEW nice initial text</br>
<p>Which is across multiple lines</p>
<p>now on sale $2/p>
</br>
So let's see what happens. </br>
<p>new ignore stuff</p>
<p>blah</p>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
def test_check_block_changedetection_text_NOT_present(client, live_server):
sleep_time_for_fetch_thread = 3
live_server_setup(live_server)
# Use a mix of case in ZzZ to prove it works case-insensitive.
ignore_text = "out of stoCk\r\nfoobar"
set_original_ignore_response()
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# Goto the edit page, add our ignore text
# Add our URL to the import page
res = client.post(
url_for("edit_page", uuid="first"),
data={"text_should_not_be_present": ignore_text, "url": test_url, 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# Check it saved
res = client.get(
url_for("edit_page", uuid="first"),
)
assert bytes(ignore_text.encode('utf-8')) in res.data
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# It should report nothing found (no new 'unviewed' class)
res = client.get(url_for("index"))
assert b'unviewed' not in res.data
assert b'/test-endpoint' in res.data
# The page changed, BUT the text is still there, just the rest of it changes, we should not see a change
set_modified_original_ignore_response()
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# It should report nothing found (no new 'unviewed' class)
res = client.get(url_for("index"))
assert b'unviewed' not in res.data
assert b'/test-endpoint' in res.data
# Now we set a change where the text is gone, it should now trigger
set_modified_response_minus_block_text()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(sleep_time_for_fetch_thread)
res = client.get(url_for("index"))
assert b'unviewed' in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data

View File

@@ -11,17 +11,16 @@ def test_setup(live_server):
live_server_setup(live_server)
def _runner_test_http_errors(client, live_server, http_code, expected_text):
def test_error_handler(client, live_server):
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write("Now you going to get a {} error code\n".format(http_code))
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
test_url = url_for('test_endpoint',
status_code=http_code,
status_code=403,
_external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
@@ -29,39 +28,20 @@ def _runner_test_http_errors(client, live_server, http_code, expected_text):
)
assert b"1 Imported" in res.data
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(2)
time.sleep(3)
res = client.get(url_for("index"))
# no change
assert b'unviewed' not in res.data
assert bytes(expected_text.encode('utf-8')) in res.data
# Error viewing tabs should appear
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True
)
assert b'Error Text' in res.data
# 'Error Screenshot' only when in playwright mode
#assert b'Error Screenshot' in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_http_error_handler(client, live_server):
_runner_test_http_errors(client, live_server, 403, 'Access denied')
_runner_test_http_errors(client, live_server, 404, 'Page not found')
_runner_test_http_errors(client, live_server, 500, '(Internal server Error) received')
_runner_test_http_errors(client, live_server, 400, 'Error - Request returned a HTTP error code 400')
assert b'Status Code 403' in res.data
assert bytes("just now".encode('utf-8')) in res.data
# Just to be sure error text is properly handled
def test_DNS_errors(client, live_server):
def test_error_text_handler(client, live_server):
# Give the endpoint time to spin up
time.sleep(1)
@@ -73,11 +53,13 @@ def test_DNS_errors(client, live_server):
)
assert b"1 Imported" in res.data
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(3)
res = client.get(url_for("index"))
assert b'Name or service not known' in res.data
# Should always record that we tried
assert bytes("just now".encode('utf-8')) in res.data

View File

@@ -15,7 +15,7 @@ def set_original_response():
</br>
So let's see what happens. </br>
<div id="sametext">Some text thats the same</div>
<div class="changetext">Some text that will change</div>
<div id="changetext">Some text that will change</div>
</body>
</html>
"""
@@ -33,8 +33,7 @@ def set_modified_response():
</br>
So let's see what happens. </br>
<div id="sametext">Some text thats the same</div>
<div class="changetext">Some text that did change ( 1000 online <br/> 80 guests<br/> 2000 online )</div>
<div class="changetext">SomeCase insensitive 3456</div>
<div id="changetext">Some text that did change ( 1000 online <br/> 80 guests)</div>
</body>
</html>
"""
@@ -45,78 +44,11 @@ def set_modified_response():
return None
def set_multiline_response():
test_return_data = """<html>
<body>
<p>Something <br/>
across 6 billion multiple<br/>
lines
</p>
<div>aaand something lines</div>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
return None
def test_setup(client, live_server):
live_server_setup(live_server)
def test_check_filter_multiline(client, live_server):
set_multiline_response()
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(3)
# Goto the edit page, add our ignore text
# Add our URL to the import page
res = client.post(
url_for("edit_page", uuid="first"),
data={"css_filter": '',
'extract_text': '/something.+?6 billion.+?lines/si',
"url": test_url,
"tag": "",
"headers": "",
'fetch_backend': "html_requests"
},
follow_redirects=True
)
assert b"Updated watch." in res.data
time.sleep(3)
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True
)
assert b'<div class="">Something' in res.data
assert b'<div class="">across 6 billion multiple' in res.data
assert b'<div class="">lines' in res.data
# but the last one, which also says 'lines' shouldnt be here (non-greedy match checking)
assert b'aaand something lines' not in res.data
def test_check_filter_and_regex_extract(client, live_server):
sleep_time_for_fetch_thread = 3
css_filter = ".changetext"
live_server_setup(live_server)
css_filter = "#changetext"
set_original_response()
@@ -132,7 +64,6 @@ def test_check_filter_and_regex_extract(client, live_server):
)
assert b"1 Imported" in res.data
time.sleep(1)
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
@@ -144,7 +75,7 @@ def test_check_filter_and_regex_extract(client, live_server):
res = client.post(
url_for("edit_page", uuid="first"),
data={"css_filter": css_filter,
'extract_text': '\d+ online\r\n\d+ guests\r\n/somecase insensitive \d+/i\r\n/somecase insensitive (345\d)/i',
'extract_text': '\d+ online\n\d+ guests',
"url": test_url,
"tag": "",
"headers": "",
@@ -155,6 +86,15 @@ def test_check_filter_and_regex_extract(client, live_server):
assert b"Updated watch." in res.data
# Check it saved
res = client.get(
url_for("edit_page", uuid="first"),
)
assert b'\d+ online' in res.data
# Trigger a check
# client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
@@ -179,20 +119,9 @@ def test_check_filter_and_regex_extract(client, live_server):
# Class will be blank for now because the frontend didnt apply the diff
assert b'<div class="">1000 online' in res.data
# All regex matching should be here
assert b'<div class="">2000 online' in res.data
# Both regexes should be here
assert b'<div class="">80 guests' in res.data
# Regex with flag handling should be here
assert b'<div class="">SomeCase insensitive 3456' in res.data
# Singular group from /somecase insensitive (345\d)/i
assert b'<div class="">3456' in res.data
# Regex with multiline flag handling should be here
# Should not be here
assert b'Some text that did change' not in res.data
assert b'Some text that did change' not in res.data
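The extract_text rules above use a /pattern/flags form such as /somecase insensitive \d+/i. A rough sketch of parsing and applying such a rule, where the flag handling is an assumption made for illustration:

import re

def extract_with_rule(text: str, rule: str):
    rule = rule.strip()
    if rule.startswith('/') and rule.count('/') >= 2:
        body, _, modifiers = rule[1:].rpartition('/')
        flags = 0
        if 'i' in modifiers:
            flags |= re.IGNORECASE
        if 's' in modifiers:
            flags |= re.DOTALL
        return re.findall(body, text, flags)
    # A bare pattern is applied as-is (assumption for this sketch).
    return re.findall(rule, text)

print(extract_with_rule("SomeCase insensitive 3456", r"/somecase insensitive (345\d)/i"))  # ['3456']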

View File

@@ -1,134 +0,0 @@
#!/usr/bin/python3
# https://www.reddit.com/r/selfhosted/comments/wa89kp/comment/ii3a4g7/?context=3
import os
import time
from flask import url_for
from .util import set_original_response, live_server_setup
from changedetectionio.model import App
def set_response_without_filter():
test_return_data = """<html>
<body>
Some initial text</br>
<p>Which is across multiple lines</p>
</br>
So let's see what happens. </br>
<div id="nope-doesnt-exist">Some text thats the same</div>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
return None
def set_response_with_filter():
test_return_data = """<html>
<body>
Some initial text</br>
<p>Which is across multiple lines</p>
</br>
So let's see what happens. </br>
<div class="ticket-available">Ticket now on sale!</div>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
return None
def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_server):
# Filter knowingly doesn't exist, like someone setting up a known filter to see if some cinema tickets are on sale again
# And the page has that filter available
# Then I should get a notification
live_server_setup(live_server)
# Give the endpoint time to spin up
time.sleep(1)
set_response_without_filter()
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("form_quick_watch_add"),
data={"url": test_url, "tag": 'cinema'},
follow_redirects=True
)
assert b"Watch added" in res.data
# Give the thread time to pick up the first version
time.sleep(3)
# Goto the edit page, add our ignore text
# Add our URL to the import page
url = url_for('test_notification_endpoint', _external=True)
notification_url = url.replace('http', 'json')
print(">>>> Notification URL: " + notification_url)
# Just a regular notification setting, this will be used by the special 'filter not found' notification
notification_form_data = {"notification_urls": notification_url,
"notification_title": "New ChangeDetection.io Notification - {watch_url}",
"notification_body": "BASE URL: {base_url}\n"
"Watch URL: {watch_url}\n"
"Watch UUID: {watch_uuid}\n"
"Watch title: {watch_title}\n"
"Watch tag: {watch_tag}\n"
"Preview: {preview_url}\n"
"Diff URL: {diff_url}\n"
"Snapshot: {current_snapshot}\n"
"Diff: {diff}\n"
"Diff Full: {diff_full}\n"
":-)",
"notification_format": "Text"}
notification_form_data.update({
"url": test_url,
"tag": "my tag",
"title": "my title",
"headers": "",
"css_filter": '.ticket-available',
"fetch_backend": "html_requests"})
res = client.post(
url_for("edit_page", uuid="first"),
data=notification_form_data,
follow_redirects=True
)
assert b"Updated watch." in res.data
time.sleep(3)
# Shouldn't exist, shouldn't have fired
assert not os.path.isfile("test-datastore/notification.txt")
# Now the filter should exist
set_response_with_filter()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(3)
assert os.path.isfile("test-datastore/notification.txt")
with open("test-datastore/notification.txt", 'r') as f:
notification = f.read()
assert 'Ticket now on sale' in notification
os.unlink("test-datastore/notification.txt")
# Test that if it gets removed, then re-added, we get a notification
# Remove the target and re-add it, we should get a new notification
set_response_without_filter()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(3)
assert not os.path.isfile("test-datastore/notification.txt")
set_response_with_filter()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(3)
assert os.path.isfile("test-datastore/notification.txt")
# Also test that the filter was updated after the first one was requested

View File

@@ -1,144 +0,0 @@
import os
import time
import re
from flask import url_for
from .util import set_original_response, live_server_setup
from changedetectionio.model import App
def set_response_with_filter():
test_return_data = """<html>
<body>
Some initial text</br>
<p>Which is across multiple lines</p>
</br>
So let's see what happens. </br>
<div id="nope-doesnt-exist">Some text thats the same</div>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
return None
def run_filter_test(client, content_filter):
# Give the endpoint time to spin up
time.sleep(1)
# cleanup for the next
client.get(
url_for("form_delete", uuid="all"),
follow_redirects=True
)
if os.path.isfile("test-datastore/notification.txt"):
os.unlink("test-datastore/notification.txt")
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("form_quick_watch_add"),
data={"url": test_url, "tag": ''},
follow_redirects=True
)
assert b"Watch added" in res.data
# Give the thread time to pick up the first version
time.sleep(3)
# Goto the edit page, add our ignore text
# Add our URL to the import page
url = url_for('test_notification_endpoint', _external=True)
notification_url = url.replace('http', 'json')
print(">>>> Notification URL: " + notification_url)
# Just a regular notification setting, this will be used by the special 'filter not found' notification
notification_form_data = {"notification_urls": notification_url,
"notification_title": "New ChangeDetection.io Notification - {watch_url}",
"notification_body": "BASE URL: {base_url}\n"
"Watch URL: {watch_url}\n"
"Watch UUID: {watch_uuid}\n"
"Watch title: {watch_title}\n"
"Watch tag: {watch_tag}\n"
"Preview: {preview_url}\n"
"Diff URL: {diff_url}\n"
"Snapshot: {current_snapshot}\n"
"Diff: {diff}\n"
"Diff Full: {diff_full}\n"
":-)",
"notification_format": "Text"}
notification_form_data.update({
"url": test_url,
"tag": "my tag",
"title": "my title",
"headers": "",
"filter_failure_notification_send": 'y',
"css_filter": content_filter,
"fetch_backend": "html_requests"})
res = client.post(
url_for("edit_page", uuid="first"),
data=notification_form_data,
follow_redirects=True
)
assert b"Updated watch." in res.data
time.sleep(3)
# Now the notification should not exist, because we didnt reach the threshold
assert not os.path.isfile("test-datastore/notification.txt")
for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT):
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(3)
# We should see something in the frontend
assert b'Warning, filter' in res.data
# Now it should exist and contain our "filter not found" alert
assert os.path.isfile("test-datastore/notification.txt")
notification = False
with open("test-datastore/notification.txt", 'r') as f:
notification = f.read()
assert 'CSS/xPath filter was not present in the page' in notification
assert content_filter.replace('"', '\\"') in notification
# Remove it and prove that it doesnt trigger when not expected
os.unlink("test-datastore/notification.txt")
set_response_with_filter()
for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT):
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(3)
# It should have sent a notification, but..
assert os.path.isfile("test-datastore/notification.txt")
# but it should not contain the info about the failed filter
with open("test-datastore/notification.txt", 'r') as f:
notification = f.read()
assert not 'CSS/xPath filter was not present in the page' in notification
# cleanup for the next
client.get(
url_for("form_delete", uuid="all"),
follow_redirects=True
)
os.unlink("test-datastore/notification.txt")
def test_setup(live_server):
live_server_setup(live_server)
def test_check_css_filter_failure_notification(client, live_server):
set_original_response()
time.sleep(1)
run_filter_test(client, '#nope-doesnt-exist')
def test_check_xpath_filter_failure_notification(client, live_server):
set_original_response()
time.sleep(1)
run_filter_test(client, '//*[@id="nope-doesnt-exist"]')
# Test that notification is never sent

View File

@@ -137,3 +137,54 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server):
res = client.get(url_for("index"))
assert b'unviewed' in res.data
# Tests the whole stack works with status codes ignored
def test_403_page_check_fails_without_ignore_status_code(client, live_server):
sleep_time_for_fetch_thread = 3
set_original_response()
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
test_url = url_for('test_endpoint', status_code=403, _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# Goto the edit page, check our ignore option
# Add our URL to the import page
res = client.post(
url_for("edit_page", uuid="first"),
data={"url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# Make a change
set_some_changed_response()
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# It should have 'unviewed' still
# Because it should be looking at only that 'sametext' id
res = client.get(url_for("index"))
assert b'Status Code 403' in res.data

View File

@@ -2,15 +2,10 @@
# coding=utf-8
import time
from flask import url_for, escape
from flask import url_for
from . util import live_server_setup
import pytest
jq_support = True
try:
import jq
except ModuleNotFoundError:
jq_support = False
def test_setup(live_server):
live_server_setup(live_server)
@@ -41,28 +36,16 @@ and it can also be repeated
from .. import html_tools
# See that we can find the second <script> one, which is not broken, and matches our filter
text = html_tools.extract_json_as_string(content, "json:$.offers.price")
text = html_tools.extract_json_as_string(content, "$.offers.price")
assert text == "23.5"
# also check for jq
if jq_support:
text = html_tools.extract_json_as_string(content, "jq:.offers.price")
assert text == "23.5"
text = html_tools.extract_json_as_string('{"id":5}', "jq:.id")
assert text == "5"
text = html_tools.extract_json_as_string('{"id":5}', "json:$.id")
text = html_tools.extract_json_as_string('{"id":5}', "$.id")
assert text == "5"
# When nothing at all is found, it should throw JSONNOTFound
# Which is caught and shown to the user in the watch-overview table
with pytest.raises(html_tools.JSONNotFound) as e_info:
html_tools.extract_json_as_string('COMPLETE GIBBERISH, NO JSON!', "json:$.id")
if jq_support:
with pytest.raises(html_tools.JSONNotFound) as e_info:
html_tools.extract_json_as_string('COMPLETE GIBBERISH, NO JSON!', "jq:.id")
html_tools.extract_json_as_string('COMPLETE GIBBERISH, NO JSON!', "$.id")
def set_original_ext_response():
data = """
@@ -83,7 +66,6 @@ def set_original_ext_response():
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(data)
return None
def set_modified_ext_response():
data = """
@@ -104,7 +86,6 @@ def set_modified_ext_response():
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(data)
return None
def set_original_response():
test_return_data = """
@@ -203,10 +184,10 @@ def test_check_json_without_filter(client, live_server):
assert b'&#34;&lt;b&gt;' in res.data
assert res.data.count(b'{\n') >= 2
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def check_json_filter(json_filter, client, live_server):
def test_check_json_filter(client, live_server):
json_filter = 'json:boss.name'
set_original_response()
# Give the endpoint time to spin up
@@ -245,7 +226,7 @@ def check_json_filter(json_filter, client, live_server):
res = client.get(
url_for("edit_page", uuid="first"),
)
assert bytes(escape(json_filter).encode('utf-8')) in res.data
assert bytes(json_filter.encode('utf-8')) in res.data
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
@@ -271,17 +252,10 @@ def check_json_filter(json_filter, client, live_server):
# And #462 - check we see the proper utf-8 string there
assert "Örnsköldsvik".encode('utf-8') in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_check_jsonpath_filter(client, live_server):
check_json_filter('json:boss.name', client, live_server)
def test_check_json_filter_bool_val(client, live_server):
json_filter = "json:$['available']"
def test_check_jq_filter(client, live_server):
if jq_support:
check_json_filter('jq:.boss.name', client, live_server)
def check_json_filter_bool_val(json_filter, client, live_server):
set_original_response()
# Give the endpoint time to spin up
@@ -330,22 +304,14 @@ def check_json_filter_bool_val(json_filter, client, live_server):
# But the change should be there, though it's hard to test that the change was detected because it will show old and new versions
assert b'false' in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_check_jsonpath_filter_bool_val(client, live_server):
check_json_filter_bool_val("json:$['available']", client, live_server)
def test_check_jq_filter_bool_val(client, live_server):
if jq_support:
check_json_filter_bool_val("jq:.available", client, live_server)
# Re #265 - Extended JSON selector test
# Stuff to consider here
# - Selector should be allowed to return empty when it doesnt match (people might wait for some condition)
# - The 'diff' tab could show the old and new content
# - Form should let us enter a selector that doesnt (yet) match anything
def check_json_ext_filter(json_filter, client, live_server):
def test_check_json_ext_filter(client, live_server):
json_filter = 'json:$[?(@.status==Sold)]'
set_original_ext_response()
# Give the endpoint time to spin up
@@ -384,7 +350,7 @@ def check_json_ext_filter(json_filter, client, live_server):
res = client.get(
url_for("edit_page", uuid="first"),
)
assert bytes(escape(json_filter).encode('utf-8')) in res.data
assert bytes(json_filter.encode('utf-8')) in res.data
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
@@ -410,12 +376,3 @@ def check_json_ext_filter(json_filter, client, live_server):
assert b'ForSale' not in res.data
assert b'Sold' in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_check_jsonpath_ext_filter(client, live_server):
check_json_ext_filter('json:$[?(@.status==Sold)]', client, live_server)
def test_check_jq_ext_filter(client, live_server):
if jq_support:
check_json_ext_filter('jq:.[] | select(.status | contains("Sold"))', client, live_server)
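The tests above run the same checks through two selector syntaxes, json: (JSONPath) and jq:. A minimal sketch of prefix-based dispatch, assuming the optional jq package may be absent as the jq_support guard in the test file suggests; the JSONPath branch is left as a stub:

import json

try:
    import jq  # optional dependency, mirroring the guard in the tests above
    JQ_SUPPORT = True
except ModuleNotFoundError:
    JQ_SUPPORT = False

def extract(document: str, selector: str):
    data = json.loads(document)
    if selector.startswith("jq:"):
        if not JQ_SUPPORT:
            raise RuntimeError("jq selector requested but the jq package is not installed")
        return jq.compile(selector[3:]).input(data).first()
    if selector.startswith("json:"):
        # JSONPath handling (e.g. via jsonpath-ng) would go here; omitted in this sketch.
        raise NotImplementedError("JSONPath branch not implemented in this sketch")
    raise ValueError("Unknown selector prefix: " + selector)

if JQ_SUPPORT:
    print(extract('{"offers": {"price": 23.5}}', "jq:.offers.price"))  # 23.5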

View File

@@ -4,13 +4,7 @@ import re
from flask import url_for
from . util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup
import logging
from changedetectionio.notification import (
default_notification_body,
default_notification_format,
default_notification_title,
valid_notification_formats,
)
from changedetectionio.notification import default_notification_body, default_notification_title
def test_setup(live_server):
live_server_setup(live_server)
@@ -26,26 +20,9 @@ def test_check_notification(client, live_server):
# Re 360 - new install should have defaults set
res = client.get(url_for("settings_page"))
notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json')
assert default_notification_body.encode() in res.data
assert default_notification_title.encode() in res.data
#####################
# Set this up for when we remove the notification from the watch, it should fallback with these details
res = client.post(
url_for("settings_page"),
data={"application-notification_urls": notification_url,
"application-notification_title": "fallback-title "+default_notification_title,
"application-notification_body": "fallback-body "+default_notification_body,
"application-notification_format": default_notification_format,
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Settings updated." in res.data
# When test mode is in BASE_URL env mode, we should see this already configured
env_base_url = os.getenv('BASE_URL', '').strip()
if len(env_base_url):
@@ -59,7 +36,7 @@ def test_check_notification(client, live_server):
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("form_quick_watch_add"),
url_for("form_watch_add"),
data={"url": test_url, "tag": ''},
follow_redirects=True
)
@@ -70,6 +47,8 @@ def test_check_notification(client, live_server):
# Goto the edit page, add our ignore text
# Add our URL to the import page
url = url_for('test_notification_endpoint', _external=True)
notification_url = url.replace('http', 'json')
print (">>>> Notification URL: "+notification_url)
@@ -175,34 +154,6 @@ def test_check_notification(client, live_server):
time.sleep(1)
assert os.path.exists("test-datastore/notification.txt") == False
res = client.get(url_for("notification_logs"))
# be sure we see it in the output log
assert b'New ChangeDetection.io Notification - ' + test_url.encode('utf-8') in res.data
set_original_response()
res = client.post(
url_for("edit_page", uuid="first"),
data={
"url": test_url,
"tag": "my tag",
"title": "my title",
"notification_urls": '',
"notification_title": '',
"notification_body": '',
"notification_format": default_notification_format,
"fetch_backend": "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
time.sleep(2)
# Verify what was sent as a notification, this file should exist
with open("test-datastore/notification.txt", "r") as f:
notification_submission = f.read()
assert "fallback-title" in notification_submission
assert "fallback-body" in notification_submission
# cleanup for the next
client.get(
url_for("form_delete", uuid="all"),
@@ -217,7 +168,7 @@ def test_notification_validation(client, live_server):
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("form_quick_watch_add"),
url_for("form_watch_add"),
data={"url": test_url, "tag": 'nice one'},
follow_redirects=True
)
@@ -225,20 +176,20 @@ def test_notification_validation(client, live_server):
assert b"Watch added" in res.data
# Re #360 some validation
# res = client.post(
# url_for("edit_page", uuid="first"),
# data={"notification_urls": 'json://localhost/foobar',
# "notification_title": "",
# "notification_body": "",
# "notification_format": "Text",
# "url": test_url,
# "tag": "my tag",
# "title": "my title",
# "headers": "",
# "fetch_backend": "html_requests"},
# follow_redirects=True
# )
# assert b"Notification Body and Title is required when a Notification URL is used" in res.data
res = client.post(
url_for("edit_page", uuid="first"),
data={"notification_urls": 'json://localhost/foobar',
"notification_title": "",
"notification_body": "",
"notification_format": "Text",
"url": test_url,
"tag": "my tag",
"title": "my title",
"headers": "",
"fetch_backend": "html_requests"},
follow_redirects=True
)
assert b"Notification Body and Title is required when a Notification URL is used" in res.data
# Now adding a wrong token should give us an error
res = client.post(
@@ -260,5 +211,3 @@ def test_notification_validation(client, live_server):
url_for("form_delete", uuid="all"),
follow_redirects=True
)

View File

@@ -16,7 +16,7 @@ def test_check_notification_error_handling(client, live_server):
# use a different URL so that it doesnt interfere with the actual check until we are ready
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("form_quick_watch_add"),
url_for("form_watch_add"),
data={"url": "https://changedetection.io/CHANGELOG.txt", "tag": ''},
follow_redirects=True
)

View File

@@ -1,43 +0,0 @@
#!/usr/bin/python3
import time
from flask import url_for
from .util import live_server_setup
def set_original_ignore_response():
test_return_data = """<html>
<body>
<span>The price is</span><span>$<!-- -->90<!-- -->.<!-- -->74</span>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
def test_obfuscations(client, live_server):
set_original_ignore_response()
live_server_setup(live_server)
time.sleep(1)
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
# Give the thread time to pick it up
time.sleep(3)
# Check the HTML conversion was detected and worked
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True
)
assert b'$90.74' in res.data

View File

@@ -1,104 +0,0 @@
#!/usr/bin/python3
import time
from flask import url_for
from .util import live_server_setup
def set_original_ignore_response():
test_return_data = """<html>
<body>
<p>Some initial text</p>
<p>Which is across multiple lines</p>
<p>So let's see what happens.</p>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
# The same but just re-ordered the text
def set_modified_swapped_lines():
# Re-ordered and with some whitespacing, should get stripped() too.
test_return_data = """<html>
<body>
<p>Some initial text</p>
<p> So let's see what happens.</p>
<p>&nbsp;Which is across multiple lines</p>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
def set_modified_with_trigger_text_response():
test_return_data = """<html>
<body>
<p>Some initial text</p>
<p>So let's see what happens.</p>
<p>and a new line!</p>
<p>Which is across multiple lines</p>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
def test_unique_lines_functionality(client, live_server):
live_server_setup(live_server)
sleep_time_for_fetch_thread = 3
set_original_ignore_response()
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(sleep_time_for_fetch_thread)
# Add our URL to the import page
res = client.post(
url_for("edit_page", uuid="first"),
data={"check_unique_lines": "y",
"url": test_url,
"fetch_backend": "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
assert b'unviewed' not in res.data
# Make a change
set_modified_swapped_lines()
time.sleep(sleep_time_for_fetch_thread)
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
# It should report nothing found (no new 'unviewed' class)
res = client.get(url_for("index"))
assert b'unviewed' not in res.data
# Now set the content which contains the new text and re-ordered existing text
set_modified_with_trigger_text_response()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(sleep_time_for_fetch_thread)
res = client.get(url_for("index"))
assert b'unviewed' in res.data

View File

@@ -86,7 +86,6 @@ def test_check_xpath_filter_utf8(client, live_server):
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(1)
res = client.post(
url_for("edit_page", uuid="first"),
data={"css_filter": filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
@@ -100,68 +99,6 @@ def test_check_xpath_filter_utf8(client, live_server):
assert b'Deleted' in res.data
# Handle utf-8 charset replies https://github.com/dgtlmoon/changedetection.io/pull/613
def test_check_xpath_text_function_utf8(client, live_server):
filter='//item/title/text()'
d='''<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
<channel>
<title>rpilocator.com</title>
<link>https://rpilocator.com</link>
<description>Find Raspberry Pi Computers in Stock</description>
<lastBuildDate>Thu, 19 May 2022 23:27:30 GMT</lastBuildDate>
<image>
<url>https://rpilocator.com/favicon.png</url>
<title>rpilocator.com</title>
<link>https://rpilocator.com/</link>
<width>32</width>
<height>32</height>
</image>
<item>
<title>Stock Alert (UK): RPi CM4</title>
<foo>something else unrelated</foo>
</item>
<item>
<title>Stock Alert (UK): Big monitor</title>
<foo>something else unrelated</foo>
</item>
</channel>
</rss>'''
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(d)
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True, content_type="application/rss+xml;charset=UTF-8")
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(1)
res = client.post(
url_for("edit_page", uuid="first"),
data={"css_filter": filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
time.sleep(3)
res = client.get(url_for("index"))
assert b'Unicode strings with encoding declaration are not supported.' not in res.data
# The service should echo back the request headers
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True
)
assert b'<div class="">Stock Alert (UK): RPi CM4' in res.data
assert b'<div class="">Stock Alert (UK): Big monitor' in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_check_markup_xpath_filter_restriction(client, live_server):
sleep_time_for_fetch_thread = 3

View File

@@ -2,8 +2,6 @@
from flask import make_response, request
from flask import url_for
import logging
import time
def set_original_response():
test_return_data = """<html>
@@ -70,31 +68,6 @@ def extract_api_key_from_UI(client):
api_key = m.group(1)
return api_key.strip()
# kinda funky, but works for now
def extract_UUID_from_client(client):
import re
res = client.get(
url_for("index"),
)
# <span id="api-key">{{api_key}}</span>
m = re.search('edit/(.+?)"', str(res.data))
uuid = m.group(1)
return uuid.strip()
def wait_for_all_checks(client):
# Loop waiting until done..
attempt=0
while attempt < 60:
time.sleep(1)
res = client.get(url_for("index"))
if not b'Checking now' in res.data:
break
logging.getLogger().info("Waiting for watch-list to not say 'Checking now'.. {}".format(attempt))
attempt += 1
def live_server_setup(live_server):
@live_server.app.route('/test-endpoint')
@@ -160,4 +133,3 @@ def live_server_setup(live_server):
return ret
live_server.start()

View File

@@ -1,2 +0,0 @@
"""Tests for the app."""

View File

@@ -1,3 +0,0 @@
#!/usr/bin/python3
from .. import conftest

View File

@@ -1,54 +0,0 @@
#!/usr/bin/python3
import time
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
# Add a site in paused mode, add an invalid filter, we should still have visual selector data ready
def test_visual_selector_content_ready(client, live_server):
import os
import json
assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"
live_server_setup(live_server)
time.sleep(1)
# Add our URL to the import page, because the docker container (playwright/selenium) won't be able to connect to our usual test URL
test_url = "https://changedetection.io/ci-test/test-runjs.html"
res = client.post(
url_for("form_quick_watch_add"),
data={"url": test_url, "tag": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
follow_redirects=True
)
assert b"Watch added in Paused state, saving will unpause" in res.data
res = client.post(
url_for("edit_page", uuid="first", unpause_on_save=1),
data={
"url": test_url,
"tag": "",
"headers": "",
'fetch_backend': "html_webdriver",
'webdriver_js_execute_code': 'document.querySelector("button[name=test-button]").click();'
},
follow_redirects=True
)
assert b"unpaused" in res.data
time.sleep(1)
wait_for_all_checks(client)
uuid = extract_UUID_from_client(client)
# Check the JS execute code before extract worked
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True
)
assert b'I smell JavaScript' in res.data
assert os.path.isfile(os.path.join('test-datastore', uuid, 'last-screenshot.png')), "last-screenshot.png should exist"
assert os.path.isfile(os.path.join('test-datastore', uuid, 'elements.json')), "xpath elements.json data should exist"
# Open it and see if it roughly looks correct
with open(os.path.join('test-datastore', uuid, 'elements.json'), 'r') as f:
json.load(f)

View File

@@ -1,124 +1,24 @@
import os
import threading
import queue
import time
from changedetectionio import content_fetcher
from changedetectionio.html_tools import FilterNotFoundInResponse
# A single update worker
#
# Requests for checking on a single site(watch) from a queue of watches
# (another process inserts watches into the queue that are time-ready for checking)
import logging
import sys
class update_worker(threading.Thread):
current_uuid = None
def __init__(self, q, notification_q, app, datastore, *args, **kwargs):
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
self.q = q
self.app = app
self.notification_q = notification_q
self.datastore = datastore
super().__init__(*args, **kwargs)
def send_content_changed_notification(self, t, watch_uuid):
from changedetectionio import diff
from changedetectionio.notification import (
default_notification_format_for_watch
)
n_object = {}
watch = self.datastore.data['watching'].get(watch_uuid, False)
if not watch:
return
watch_history = watch.history
dates = list(watch_history.keys())
# Theoretically it's possible that this could be just 1 long,
# - In the case that the timestamp key was not unique
if len(dates) == 1:
raise ValueError(
"History index had 2 or more, but only 1 date loaded, timestamps were not unique? maybe two of the same timestamps got written, needs more delay?"
)
n_object['notification_urls'] = watch['notification_urls'] if len(watch['notification_urls']) else \
self.datastore.data['settings']['application']['notification_urls']
n_object['notification_title'] = watch['notification_title'] if watch['notification_title'] else \
self.datastore.data['settings']['application']['notification_title']
n_object['notification_body'] = watch['notification_body'] if watch['notification_body'] else \
self.datastore.data['settings']['application']['notification_body']
n_object['notification_format'] = watch['notification_format'] if watch['notification_format'] != default_notification_format_for_watch else \
self.datastore.data['settings']['application']['notification_format']
# Only prepare to notify if the rules above matched
if 'notification_urls' in n_object and n_object['notification_urls']:
# HTML needs linebreak, but MarkDown and Text can use a linefeed
if n_object['notification_format'] == 'HTML':
line_feed_sep = "</br>"
else:
line_feed_sep = "\n"
with open(watch_history[dates[-1]], 'rb') as f:
snapshot_contents = f.read()
n_object.update({
'watch_url': watch['url'],
'uuid': watch_uuid,
'current_snapshot': snapshot_contents.decode('utf-8'),
'diff': diff.render_diff(watch_history[dates[-2]], watch_history[dates[-1]], line_feed_sep=line_feed_sep),
'diff_full': diff.render_diff(watch_history[dates[-2]], watch_history[dates[-1]], True, line_feed_sep=line_feed_sep)
})
logging.info (">> SENDING NOTIFICATION")
self.notification_q.put(n_object)
else:
logging.info (">> NO Notification sent, notification_url was empty in both watch and system")
def send_filter_failure_notification(self, watch_uuid):
threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts')
watch = self.datastore.data['watching'].get(watch_uuid, False)
if not watch:
return
n_object = {'notification_title': 'Changedetection.io - Alert - CSS/xPath filter was not present in the page',
'notification_body': "Your configured CSS/xPath filter of '{}' for {{watch_url}} did not appear on the page after {} attempts, did the page change layout?\n\nLink: {{base_url}}/edit/{{watch_uuid}}\n\nThanks - Your omniscient changedetection.io installation :)\n".format(
watch['css_filter'],
threshold),
'notification_format': 'text'}
if len(watch['notification_urls']):
n_object['notification_urls'] = watch['notification_urls']
elif len(self.datastore.data['settings']['application']['notification_urls']):
n_object['notification_urls'] = self.datastore.data['settings']['application']['notification_urls']
# Only prepare to notify if the rules above matched
if 'notification_urls' in n_object:
n_object.update({
'watch_url': watch['url'],
'uuid': watch_uuid
})
self.notification_q.put(n_object)
print("Sent filter not found notification for {}".format(watch_uuid))
def cleanup_error_artifacts(self, uuid):
# All went fine, remove error artifacts
cleanup_files = ["last-error-screenshot.png", "last-error.txt"]
for f in cleanup_files:
full_path = os.path.join(self.datastore.datastore_path, uuid, f)
if os.path.isfile(full_path):
os.unlink(full_path)
def run(self):
from changedetectionio import fetch_site_status
@@ -127,7 +27,7 @@ class update_worker(threading.Thread):
while not self.app.config.exit.is_set():
try:
priority, uuid = self.q.get(block=False)
uuid = self.q.get(block=False)
except queue.Empty:
pass
@@ -135,17 +35,17 @@ class update_worker(threading.Thread):
self.current_uuid = uuid
if uuid in list(self.datastore.data['watching'].keys()):
changed_detected = False
contents = b''
contents = ""
screenshot = False
update_obj= {}
xpath_data = False
process_changedetection_results = True
print("> Processing UUID {} Priority {} URL {}".format(uuid, priority, self.datastore.data['watching'][uuid]['url']))
now = time.time()
try:
changed_detected, update_obj, contents = update_handler.run(uuid)
changed_detected, update_obj, contents, screenshot, xpath_data = update_handler.run(uuid)
# Re #342
# In Python 3, all strings are sequences of Unicode characters. There is a bytes type that holds raw bytes.
# We then convert/.decode('utf-8') for the notification etc
@@ -153,108 +53,33 @@ class update_worker(threading.Thread):
raise Exception("Error - returned data from the fetch handler SHOULD be bytes")
except PermissionError as e:
self.app.logger.error("File permission error updating", uuid, str(e))
process_changedetection_results = False
except content_fetcher.ReplyWithContentButNoText as e:
# Totally fine, it's by choice - just continue on, nothing more to care about
# Page had elements/content but no renderable text
# Backend (not filters) gave zero output
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': "Got HTML content but no text found (With {} reply code).".format(e.status_code)})
if e.screenshot:
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot)
process_changedetection_results = False
except content_fetcher.Non200ErrorCodeReceived as e:
if e.status_code == 403:
err_text = "Error - 403 (Access denied) received"
elif e.status_code == 404:
err_text = "Error - 404 (Page not found) received"
elif e.status_code == 500:
err_text = "Error - 500 (Internal server Error) received"
if self.datastore.data['watching'][uuid].get('css_filter'):
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': "Got HTML content but no text found (CSS / xPath Filter not found in page?)"})
else:
err_text = "Error - Request returned a HTTP error code {}".format(str(e.status_code))
if e.screenshot:
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot, as_error=True)
if e.xpath_data:
self.datastore.save_xpath_data(watch_uuid=uuid, data=e.xpath_data, as_error=True)
if e.page_text:
self.datastore.save_error_text(watch_uuid=uuid, contents=e.page_text)
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
# So that we get a trigger when the content is added again
'previous_md5': ''})
process_changedetection_results = False
except FilterNotFoundInResponse as e:
if not self.datastore.data['watching'].get(uuid):
continue
err_text = "Warning, filter '{}' not found".format(str(e))
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
# So that we get a trigger when the content is added again
'previous_md5': ''})
# Only when enabled, send the notification
if self.datastore.data['watching'][uuid].get('filter_failure_notification_send', False):
c = self.datastore.data['watching'][uuid].get('consecutive_filter_failures', 5)
c += 1
# Send notification if we reached the threshold?
threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts',
0)
print("Filter for {} not found, consecutive_filter_failures: {}".format(uuid, c))
if threshold > 0 and c >= threshold:
if not self.datastore.data['watching'][uuid].get('notification_muted'):
self.send_filter_failure_notification(uuid)
c = 0
self.datastore.update_watch(uuid=uuid, update_obj={'consecutive_filter_failures': c})
process_changedetection_results = True
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': "Got HTML content but no text found."})
pass
except content_fetcher.EmptyReply as e:
# Some kind of custom to-str handler in the exception handler that does this?
err_text = "EmptyReply - try increasing 'Wait seconds before extracting text', Status Code {}".format(e.status_code)
err_text = "EmptyReply: Status Code {}".format(e.status_code)
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
'last_check_status': e.status_code})
except content_fetcher.ScreenshotUnavailable as e:
err_text = "Screenshot unavailable, page did not render fully in the expected time - try increasing 'Wait seconds before extracting text'"
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
'last_check_status': e.status_code})
process_changedetection_results = False
except content_fetcher.JSActionExceptions as e:
err_text = "Error running JS Actions - Page request - "+e.message
if e.screenshot:
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot, as_error=True)
err_text = "Screenshot unavailable, page did not render fully in the expected time"
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
'last_check_status': e.status_code})
except content_fetcher.PageUnloadable as e:
err_text = "Page request from server didnt respond correctly"
if e.message:
err_text = "{} - {}".format(err_text, e.message)
if e.screenshot:
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot, as_error=True)
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
'last_check_status': e.status_code})
except Exception as e:
self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
# Other serious error
process_changedetection_results = False
else:
# Crash protection, the watch entry could have been removed by this point (during a slow chrome fetch etc)
if not self.datastore.data['watching'].get(uuid):
continue
# Mark that we never had any failures
if not self.datastore.data['watching'][uuid].get('ignore_status_codes'):
update_obj['consecutive_filter_failures'] = 0
self.cleanup_error_artifacts(uuid)
# Different exceptions mean that we may or may not want to bump the snapshot, trigger notifications etc
if process_changedetection_results:
try:
watch = self.datastore.data['watching'][uuid]
fname = "" # Saved history text filename
@@ -262,19 +87,59 @@ class update_worker(threading.Thread):
# For the FIRST time we check a site, or a change detected, save the snapshot.
if changed_detected or not watch['last_checked']:
# A change was detected
watch.save_history_text(contents=contents, timestamp=str(round(time.time())))
fname = watch.save_history_text(contents=contents, timestamp=str(round(time.time())))
# Generally update anything interesting returned
self.datastore.update_watch(uuid=uuid, update_obj=update_obj)
# A change was detected
if changed_detected:
n_object = {}
print (">> Change detected in UUID {} - {}".format(uuid, watch['url']))
# Notifications should only trigger on the second time (first time, we gather the initial snapshot)
if watch.history_n >= 2:
if not self.datastore.data['watching'][uuid].get('notification_muted'):
self.send_content_changed_notification(self, watch_uuid=uuid)
dates = list(watch.history.keys())
prev_fname = watch.history[dates[-2]]
# Did it have any notification alerts to hit?
if len(watch['notification_urls']):
print(">>> Notifications queued for UUID from watch {}".format(uuid))
n_object['notification_urls'] = watch['notification_urls']
n_object['notification_title'] = watch['notification_title']
n_object['notification_body'] = watch['notification_body']
n_object['notification_format'] = watch['notification_format']
# No? Maybe there's a global setting, queue them all
elif len(self.datastore.data['settings']['application']['notification_urls']):
print(">>> Watch notification URLs were empty, using GLOBAL notifications for UUID: {}".format(uuid))
n_object['notification_urls'] = self.datastore.data['settings']['application']['notification_urls']
n_object['notification_title'] = self.datastore.data['settings']['application']['notification_title']
n_object['notification_body'] = self.datastore.data['settings']['application']['notification_body']
n_object['notification_format'] = self.datastore.data['settings']['application']['notification_format']
else:
print(">>> NO notifications queued, watch and global notification URLs were empty.")
# Only prepare to notify if the rules above matched
if 'notification_urls' in n_object:
# HTML needs linebreak, but MarkDown and Text can use a linefeed
if n_object['notification_format'] == 'HTML':
line_feed_sep = "</br>"
else:
line_feed_sep = "\n"
from changedetectionio import diff
n_object.update({
'watch_url': watch['url'],
'uuid': uuid,
'current_snapshot': contents.decode('utf-8'),
'diff': diff.render_diff(prev_fname, fname, line_feed_sep=line_feed_sep),
'diff_full': diff.render_diff(prev_fname, fname, True, line_feed_sep=line_feed_sep)
})
self.notification_q.put(n_object)
except Exception as e:
# Catch everything possible here, so that if a worker crashes, we don't lose it until restart!
@@ -282,16 +147,15 @@ class update_worker(threading.Thread):
self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
# Always record that we at least tried
self.datastore.update_watch(uuid=uuid, update_obj={'fetch_time': round(time.time() - now, 3),
'last_checked': round(time.time())})
# Always save the screenshot if it's available
if update_handler.screenshot:
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=update_handler.screenshot)
if update_handler.xpath_data:
self.datastore.save_xpath_data(watch_uuid=uuid, data=update_handler.xpath_data)
finally:
# Always record that we at least tried
self.datastore.update_watch(uuid=uuid, update_obj={'fetch_time': round(time.time() - now, 3),
'last_checked': round(time.time())})
# Always save the screenshot if it's available
if screenshot:
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=screenshot)
if xpath_data:
self.datastore.save_xpath_data(watch_uuid=uuid, data=xpath_data)
self.current_uuid = None # Done
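(Aside, not part of this diff: the header comment of update_worker describes a consumer thread pulling watch UUIDs off a queue that another process fills with time-ready watches. Below is a minimal standard-library sketch of that producer/consumer shape, assuming nothing about this codebase beyond that comment; worker and the example UUID string are placeholders.)

import queue
import threading
import time

update_q = queue.Queue()

def worker(q):
    # Consumer: pull a watch UUID off the queue, pretend to check it, repeat.
    while True:
        try:
            uuid = q.get(block=False)
        except queue.Empty:
            time.sleep(1)
            continue
        print("Processing UUID {}".format(uuid))
        q.task_done()

threading.Thread(target=worker, args=(update_q,), daemon=True).start()

# Producer side: something else decides which watches are due and enqueues them.
update_q.put("some-watch-uuid")
update_q.join()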


@@ -6,8 +6,6 @@ services:
hostname: changedetection
volumes:
- changedetection-data:/datastore
# Configurable proxy list support, see https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#proxy-list-support
# - ./proxies.json:/datastore/proxies.json
# environment:
# Default listening port, can also be changed with the -p option
@@ -26,13 +24,13 @@ services:
# https://selenium-python.readthedocs.io/api.html#module-selenium.webdriver.common.proxy
#
# Alternative Playwright URL, do not use "'s or 's!
# - PLAYWRIGHT_DRIVER_URL=ws://playwright-chrome:3000/?stealth=1&--disable-web-security=true
# - PLAYWRIGHT_DRIVER_URL=ws://playwright-chrome:3000/
#
# Playwright proxy settings playwright_proxy_server, playwright_proxy_bypass, playwright_proxy_username, playwright_proxy_password
#
# https://playwright.dev/python/docs/api/class-browsertype#browser-type-launch-option-proxy
#
# Plain requests - proxy support example.
# Plain requsts - proxy support example.
# - HTTP_PROXY=socks5h://10.10.1.10:1080
# - HTTPS_PROXY=socks5h://10.10.1.10:1080
#
@@ -45,9 +43,6 @@ services:
# Respect proxy_pass type settings, `proxy_set_header Host "localhost";` and `proxy_set_header X-Forwarded-Prefix /app;`
# More here https://github.com/dgtlmoon/changedetection.io/wiki/Running-changedetection.io-behind-a-reverse-proxy-sub-directory
# - USE_X_SETTINGS=1
#
# Hides the `Referer` header so that monitored websites can't see the changedetection.io hostname.
# - HIDE_REFERER=true
# Comment out ports: when using behind a reverse proxy , enable networks: etc.
ports:
@@ -78,17 +73,6 @@ services:
# hostname: playwright-chrome
# image: browserless/chrome
# restart: unless-stopped
# environment:
# - SCREEN_WIDTH=1920
# - SCREEN_HEIGHT=1024
# - SCREEN_DEPTH=16
# - ENABLE_DEBUGGER=false
# - PREBOOT_CHROME=true
# - CONNECTION_TIMEOUT=300000
# - MAX_CONCURRENT_SESSIONS=10
# - CHROME_REFRESH_TIME=600000
# - DEFAULT_BLOCK_ADS=true
# - DEFAULT_STEALTH=true
volumes:
changedetection-data:
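(Aside, not part of this diff: the commented HTTP_PROXY / HTTPS_PROXY lines above show the plain-requests proxy route. As a rough sketch, the same SOCKS proxy can also be passed to requests explicitly, provided the socks extra pinned in requirements.txt (requests[socks]) is installed; the proxy address is the placeholder from the compose comments and example.com is only a stand-in URL.)

import requests

# socks5h:// resolves DNS through the proxy as well, matching the commented example.
proxies = {
    "http": "socks5h://10.10.1.10:1080",
    "https": "socks5h://10.10.1.10:1080",
}
response = requests.get("https://example.com", proxies=proxies, timeout=30)
print(response.status_code)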

Binary file not shown (before: 46 KiB).

Binary file not shown (before: 209 KiB, after: 190 KiB).


@@ -1,8 +1,8 @@
flask ~= 2.0
flask~= 2.0
flask_wtf
eventlet >= 0.31.0
eventlet>=0.31.0
validators
timeago ~= 1.0
timeago ~=1.0
inscriptis ~= 2.2
feedgen ~= 0.9
flask-login ~= 0.5
@@ -10,20 +10,15 @@ flask_restful
pytz
# Set these versions together to avoid a RequestsDependencyWarning
# >= 2.26 also adds Brotli support if brotli is installed
brotli ~= 1.0
requests[socks] ~= 2.28
requests[socks] ~= 2.26
urllib3 > 1.26
chardet > 2.3.0
wtforms ~= 3.0
jsonpath-ng ~= 1.5.3
# jq not available on Windows so must be installed manually
# Notification library
apprise ~= 1.1.0
apprise ~= 0.9.9
# apprise mqtt https://github.com/dgtlmoon/changedetection.io/issues/315
paho-mqtt
@@ -47,4 +42,3 @@ selenium ~= 4.1.0
werkzeug ~= 2.0.0
# playwright is installed at Dockerfile build time because it's not available on all platforms
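(Aside, not part of this diff: the comment above about setting the requests, urllib3 and chardet versions together to avoid a RequestsDependencyWarning can be sanity-checked at runtime with the standard library; this snippet is illustrative only and is not used anywhere in the repository.)

from importlib.metadata import PackageNotFoundError, version

# Print the installed versions of the co-pinned packages from requirements.txt.
for pkg in ("requests", "urllib3", "chardet", "brotli"):
    try:
        print(pkg, version(pkg))
    except PackageNotFoundError:
        print(pkg, "not installed")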