Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-13 13:06:10 +00:00)

Compare commits: bugfix-del ... 1725-linux (31 commits)
Commits (SHA1): 3fe5d0b84e, 6ef8a1c18f, 227cd615cd, 126f0fbf87, cfa712c88c, 6a6ba40b6a,
e7f726c057, df0cc7b585, 76cd98b521, f84ba0fb31, c35cbd33d6, 661f7fe32c, 7cb7eebbc5,
aaceb4ebad, 56cf6e5ea5, 1987e109e8, 20d65cdd26, 37ff5f6d37, 2f777ea3bb, e709201955,
572f71299f, 5f150c4f03, 8cbf8e8f57, 0e65dda5b6, 72a415144b, 52f2c00308, 72311fb845,
f1b10a22f8, a4c620c308, 9434eac72d, edb5e20de6
@@ -1,2 +1,18 @@
 .git
 .github
+changedetectionio/processors/__pycache__
+changedetectionio/api/__pycache__
+changedetectionio/model/__pycache__
+changedetectionio/blueprint/price_data_follower/__pycache__
+changedetectionio/blueprint/tags/__pycache__
+changedetectionio/blueprint/__pycache__
+changedetectionio/blueprint/browser_steps/__pycache__
+changedetectionio/fetchers/__pycache__
+changedetectionio/tests/visualselector/__pycache__
+changedetectionio/tests/restock/__pycache__
+changedetectionio/tests/__pycache__
+changedetectionio/tests/fetchers/__pycache__
+changedetectionio/tests/unit/__pycache__
+changedetectionio/tests/proxy_list/__pycache__
+changedetectionio/__pycache__
+
4  .github/workflows/containers.yml  (vendored)

@@ -95,7 +95,7 @@ jobs:
 push: true
 tags: |
 ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:dev,ghcr.io/${{ github.repository }}:dev
-platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
+platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
 cache-from: type=local,src=/tmp/.buildx-cache
 cache-to: type=local,dest=/tmp/.buildx-cache
 # Looks like this was disabled
@@ -115,7 +115,7 @@ jobs:
 ghcr.io/dgtlmoon/changedetection.io:${{ github.event.release.tag_name }}
 ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest
 ghcr.io/dgtlmoon/changedetection.io:latest
-platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
+platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
 cache-from: type=local,src=/tmp/.buildx-cache
 cache-to: type=local,dest=/tmp/.buildx-cache
 # Looks like this was disabled
2  .github/workflows/test-container-build.yml  (vendored)

@@ -62,7 +62,7 @@ jobs:
 with:
 context: ./
 file: ./Dockerfile
-platforms: linux/arm/v7,linux/arm/v6,linux/amd64,linux/arm64,
+platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
 cache-from: type=local,src=/tmp/.buildx-cache
 cache-to: type=local,dest=/tmp/.buildx-cache
15  .github/workflows/test-only.yml  (vendored)

@@ -36,10 +36,16 @@ jobs:
 run: |
 # Build a changedetection.io container and start testing inside
 docker build . -t test-changedetectionio
+# Debug info
+docker run test-changedetectionio bash -c 'pip list'
+
+- name: Spin up ancillary SMTP+Echo message test server
+run: |
+# Debug SMTP server/echo message back server
+docker run --network changedet-network -d -p 11025:11025 -p 11080:11080 --hostname mailserver test-changedetectionio bash -c 'python changedetectionio/tests/smtp/smtp-test-server.py'
+
 - name: Test built container with pytest
 run: |

 # Unit tests
 docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_notification_diff'

@@ -58,11 +64,16 @@ jobs:
 # Settings headers playwright tests - Call back in from Browserless, check headers
 docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
 docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "WEBDRIVER_URL=http://selenium:4444/wd/hub" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
 docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'

 # restock detection via playwright - added name=changedet here so that playwright/browserless can connect to it
 docker run --rm --name "changedet" -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-port=5004 --live-server-host=0.0.0.0 tests/restock/test_restock.py'

+- name: Test SMTP notification mime types
+run: |
+# SMTP content types - needs the 'Debug SMTP server/echo message back server' container from above
+docker run --rm --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/smtp/test_notification_smtp.py'
+
 - name: Test with puppeteer fetcher and disk cache
 run: |
 docker run --rm -e "PUPPETEER_DISK_CACHE=/tmp/data/" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py'
@@ -1,5 +1,5 @@
 # pip dependencies install stage
-FROM python:3.10-slim as builder
+FROM python:3.10-slim-bullseye as builder

 # See `cryptography` pin comment in requirements.txt
 ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1

@@ -29,7 +29,7 @@ RUN pip install --target=/dependencies playwright~=1.27.1 \
 || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."

 # Final image stage
-FROM python:3.10-slim
+FROM python:3.10-slim-bullseye

 RUN apt-get update && apt-get install -y --no-install-recommends \
 libssl1.1 \
@@ -13,3 +13,6 @@ include changedetection.py
 global-exclude *.pyc
 global-exclude node_modules
 global-exclude venv
+
+global-exclude test-datastore
+global-exclude changedetection.io*dist-info
@@ -38,7 +38,7 @@ from flask_paginate import Pagination, get_page_parameter
 from changedetectionio import html_tools
 from changedetectionio.api import api_v1

-__version__ = '0.42.2'
+__version__ = '0.44.1'

 datastore = None
|||||||
return "Access denied, bad token", 403
|
return "Access denied, bad token", 403
|
||||||
|
|
||||||
from . import diff
|
from . import diff
|
||||||
limit_tag = request.args.get('tag')
|
limit_tag = request.args.get('tag', '').lower().strip()
|
||||||
|
# Be sure limit_tag is a uuid
|
||||||
|
for uuid, tag in datastore.data['settings']['application'].get('tags', {}).items():
|
||||||
|
if limit_tag == tag.get('title', '').lower().strip():
|
||||||
|
limit_tag = uuid
|
||||||
|
|
||||||
# Sort by last_changed and add the uuid which is usually the key..
|
# Sort by last_changed and add the uuid which is usually the key..
|
||||||
sorted_watches = []
|
sorted_watches = []
|
||||||
|
|
||||||
# @todo needs a .itemsWithTag() or something - then we can use that in Jinaj2 and throw this away
|
# @todo needs a .itemsWithTag() or something - then we can use that in Jinaj2 and throw this away
|
||||||
for uuid, watch in datastore.data['watching'].items():
|
for uuid, watch in datastore.data['watching'].items():
|
||||||
|
if limit_tag and not limit_tag in watch['tags']:
|
||||||
if limit_tag != None:
|
continue
|
||||||
# Support for comma separated list of tags.
|
watch['uuid'] = uuid
|
||||||
for tag_in_watch in watch['tag'].split(','):
|
sorted_watches.append(watch)
|
||||||
tag_in_watch = tag_in_watch.strip()
|
|
||||||
if tag_in_watch == limit_tag:
|
|
||||||
watch['uuid'] = uuid
|
|
||||||
sorted_watches.append(watch)
|
|
||||||
|
|
||||||
else:
|
|
||||||
watch['uuid'] = uuid
|
|
||||||
sorted_watches.append(watch)
|
|
||||||
|
|
||||||
sorted_watches.sort(key=lambda x: x.last_changed, reverse=False)
|
sorted_watches.sort(key=lambda x: x.last_changed, reverse=False)
|
||||||
|
|
||||||
@@ -392,9 +388,17 @@ def changedetection_app(config=None, datastore_o=None):
    @app.route("/", methods=['GET'])
    @login_optionally_required
    def index():
+       global datastore
        from changedetectionio import forms

-       limit_tag = request.args.get('tag')
+       limit_tag = request.args.get('tag', '').lower().strip()

+       # Be sure limit_tag is a uuid
+       for uuid, tag in datastore.data['settings']['application'].get('tags', {}).items():
+           if limit_tag == tag.get('title', '').lower().strip():
+               limit_tag = uuid
+
        # Redirect for the old rss path which used the /?rss=true
        if request.args.get('rss'):
            return redirect(url_for('rss', tag=limit_tag))
@@ -414,30 +418,15 @@ def changedetection_app(config=None, datastore_o=None):
        sorted_watches = []
        search_q = request.args.get('q').strip().lower() if request.args.get('q') else False
        for uuid, watch in datastore.data['watching'].items():
-           if limit_tag:
-               # Support for comma separated list of tags.
-               if not watch.get('tag'):
-                   continue
-               for tag_in_watch in watch.get('tag', '').split(','):
-                   tag_in_watch = tag_in_watch.strip()
-                   if tag_in_watch == limit_tag:
-                       watch['uuid'] = uuid
-                       if search_q:
-                           if (watch.get('title') and search_q in watch.get('title').lower()) or search_q in watch.get('url', '').lower():
-                               sorted_watches.append(watch)
-                       else:
-                           sorted_watches.append(watch)
-
-           else:
-               #watch['uuid'] = uuid
-               if search_q:
-                   if (watch.get('title') and search_q in watch.get('title').lower()) or search_q in watch.get('url', '').lower():
-                       sorted_watches.append(watch)
-               else:
-                   sorted_watches.append(watch)
-
-       existing_tags = datastore.get_all_tags()
+           if limit_tag and not limit_tag in watch['tags']:
+               continue
+           if search_q:
+               if (watch.get('title') and search_q in watch.get('title').lower()) or search_q in watch.get('url', '').lower():
+                   sorted_watches.append(watch)
+           else:
+               sorted_watches.append(watch)

        form = forms.quickWatchForm(request.form)
        page = request.args.get(get_page_parameter(), type=int, default=1)
        total_count = len(sorted_watches)
@@ -452,6 +441,7 @@ def changedetection_app(config=None, datastore_o=None):
            # Don't link to hosting when we're on the hosting environment
            active_tag=limit_tag,
            app_rss_token=datastore.data['settings']['application']['rss_access_token'],
+           datastore=datastore,
            form=form,
            guid=datastore.data['app_guid'],
            has_proxies=datastore.proxy_list,
@@ -463,7 +453,7 @@ def changedetection_app(config=None, datastore_o=None):
            sort_attribute=request.args.get('sort') if request.args.get('sort') else request.cookies.get('sort'),
            sort_order=request.args.get('order') if request.args.get('order') else request.cookies.get('order'),
            system_default_fetcher=datastore.data['settings']['application'].get('fetch_backend'),
-           tags=existing_tags,
+           tags=datastore.data['settings']['application'].get('tags'),
            watches=sorted_watches
        )
@@ -606,9 +596,13 @@ def changedetection_app(config=None, datastore_o=None):

        # proxy_override set to the json/text list of the items
        form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
-                              data=default,
+                              data=default
                               )
+
+       # For the form widget tag uuid lookup
+       form.tags.datastore = datastore # in _value
+
        form.fetch_backend.choices.append(("system", 'System settings default'))

        # form.browser_steps[0] can be assumed that we 'goto url' first
@@ -659,6 +653,16 @@ def changedetection_app(config=None, datastore_o=None):
                extra_update_obj['filter_text_replaced'] = True
                extra_update_obj['filter_text_removed'] = True

+           # Because wtforms doesn't support accessing other data in process_ , but we convert the CSV list of tags back to a list of UUIDs
+           tag_uuids = []
+           if form.data.get('tags'):
+               # Sometimes in testing this can be list, dont know why
+               if type(form.data.get('tags')) == list:
+                   extra_update_obj['tags'] = form.data.get('tags')
+               else:
+                   for t in form.data.get('tags').split(','):
+                       tag_uuids.append(datastore.add_tag(name=t))
+                   extra_update_obj['tags'] = tag_uuids
+
            datastore.data['watching'][uuid].update(form.data)
            datastore.data['watching'][uuid].update(extra_update_obj)
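A minimal standalone sketch of the CSV-to-UUID conversion introduced above, assuming a simplified add_tag() helper and an in-memory tag store rather than the project's real datastore:

    # Illustrative only - mirrors the edit-handler logic above using assumed helpers.
    import uuid as uuid_lib

    tags_by_uuid = {}   # assumed stand-in for datastore.data['settings']['application']['tags']

    def add_tag(name):
        # Assumed helper: reuse a tag with the same title, otherwise create one and return its UUID.
        name = name.strip()
        for tag_uuid, tag in tags_by_uuid.items():
            if tag['title'].lower() == name.lower():
                return tag_uuid
        new_uuid = str(uuid_lib.uuid4())
        tags_by_uuid[new_uuid] = {'title': name}
        return new_uuid

    def tags_field_to_uuids(form_value):
        # A submitted 'tags' value may already be a list of UUIDs, or a CSV string of titles.
        if isinstance(form_value, list):
            return form_value
        return [add_tag(t) for t in form_value.split(',') if t.strip()]

    print(tags_field_to_uuids("shopping, tech"))   # two freshly generated UUIDs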
@@ -713,7 +717,7 @@ def changedetection_app(config=None, datastore_o=None):
            form=form,
            has_default_notification_urls=True if len(datastore.data['settings']['application']['notification_urls']) else False,
            has_empty_checktime=using_default_check_time,
-           has_extra_headers_file=watch.has_extra_headers_file or datastore.has_extra_headers_file,
+           has_extra_headers_file=len(datastore.get_all_headers_in_textfile_for_watch(uuid=uuid)) > 0,
            is_html_webdriver=is_html_webdriver,
            jq_support=jq_support,
            playwright_enabled=os.getenv('PLAYWRIGHT_DRIVER_URL', False),
@@ -1110,8 +1114,8 @@ def changedetection_app(config=None, datastore_o=None):
                os.path.join(datastore_o.datastore_path, list_with_tags_file), "w"
            ) as f:
                for uuid in datastore.data["watching"]:
-                   url = datastore.data["watching"][uuid]["url"]
-                   tag = datastore.data["watching"][uuid]["tag"]
+                   url = datastore.data["watching"][uuid].get('url')
+                   tag = datastore.data["watching"][uuid].get('tags', {})
                    f.write("{} {}\r\n".format(url, tag))

            # Add it to the Zip
@@ -1199,7 +1203,7 @@ def changedetection_app(config=None, datastore_o=None):

        add_paused = request.form.get('edit_and_watch_submit_button') != None
        processor = request.form.get('processor', 'text_json_diff')
-       new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip(), extras={'paused': add_paused, 'processor': processor})
+       new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras={'paused': add_paused, 'processor': processor})

        if new_uuid:
            if add_paused:
@@ -1267,9 +1271,11 @@ def changedetection_app(config=None, datastore_o=None):
        elif tag != None:
            # Items that have this current tag
            for watch_uuid, watch in datastore.data['watching'].items():
-               if (tag != None and tag in watch['tag']):
+               if (tag != None and tag in watch.get('tags', {})):
                    if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
-                       update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False}))
+                       update_q.put(
+                           queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False})
+                       )
                        i += 1

        else:
@@ -1357,6 +1363,18 @@ def changedetection_app(config=None, datastore_o=None):
                    datastore.data['watching'][uuid.strip()]['notification_format'] = default_notification_format_for_watch
            flash("{} watches set to use default notification settings".format(len(uuids)))

+       elif (op == 'assign-tag'):
+           op_extradata = request.form.get('op_extradata', '').strip()
+           if op_extradata:
+               tag_uuid = datastore.add_tag(name=op_extradata)
+               if op_extradata and tag_uuid:
+                   for uuid in uuids:
+                       uuid = uuid.strip()
+                       if datastore.data['watching'].get(uuid):
+                           datastore.data['watching'][uuid]['tags'].append(tag_uuid)
+
+           flash("{} watches assigned tag".format(len(uuids)))
+
        return redirect(url_for('index'))

    @app.route("/api/share-url", methods=['GET'])
@@ -1366,7 +1384,6 @@ def changedetection_app(config=None, datastore_o=None):
        the share-link can be imported/added"""
        import requests
        import json
-       tag = request.args.get('tag')
        uuid = request.args.get('uuid')

        # more for testing
@@ -1419,6 +1436,12 @@ def changedetection_app(config=None, datastore_o=None):
    import changedetectionio.blueprint.price_data_follower as price_data_follower
    app.register_blueprint(price_data_follower.construct_blueprint(datastore, update_q), url_prefix='/price_data_follower')

+   import changedetectionio.blueprint.tags as tags
+   app.register_blueprint(tags.construct_blueprint(datastore), url_prefix='/tags')
+
+   import changedetectionio.blueprint.check_proxies as check_proxies
+   app.register_blueprint(check_proxies.construct_blueprint(datastore=datastore), url_prefix='/check_proxy')
+
    # @todo handle ctrl break
    ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start()
@@ -1,3 +1,6 @@
+import os
+from distutils.util import strtobool
+
 from flask_expects_json import expects_json
 from changedetectionio import queuedWatchMetaData
 from flask_restful import abort, Resource
@@ -33,7 +36,7 @@ class Watch(Resource):
    @auth.check_token
    def get(self, uuid):
        """
-       @api {get} /api/v1/watch/:uuid Get a single watch data
+       @api {get} /api/v1/watch/:uuid Single watch - get data, recheck, pause, mute.
        @apiDescription Retrieve watch information and set muted/paused status
        @apiExample {curl} Example usage:
            curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -H"x-api-key:813031b16330fe25e3780cf0325daa45"
@@ -209,7 +212,9 @@ class CreateWatch(Resource):
        json_data = request.get_json()
        url = json_data['url'].strip()

-       if not validators.url(json_data['url'].strip()):
+       # If hosts that only contain alphanumerics are allowed ("localhost" for example)
+       allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
+       if not validators.url(url, simple_host=allow_simplehost):
            return "Invalid or unsupported URL", 400

        if json_data.get('proxy'):
@@ -218,9 +223,16 @@ class CreateWatch(Resource):
            return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400

        extras = copy.deepcopy(json_data)
+
+       # Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)
+       tags = None
+       if extras.get('tag'):
+           tags = extras.get('tag')
+           del extras['tag']
+
        del extras['url']

-       new_uuid = self.datastore.add_watch(url=url, extras=extras)
+       new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags)
        if new_uuid:
            self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
            return {'uuid': new_uuid}, 201
@@ -259,13 +271,16 @@ class CreateWatch(Resource):
        """
        list = {}

-       tag_limit = request.args.get('tag', None)
-       for k, watch in self.datastore.data['watching'].items():
-           if tag_limit:
-               if not tag_limit.lower() in watch.all_tags:
-                   continue
-
-           list[k] = {'url': watch['url'],
+       tag_limit = request.args.get('tag', '').lower()
+
+       for uuid, watch in self.datastore.data['watching'].items():
+           # Watch tags by name (replace the other calls?)
+           tags = self.datastore.get_all_tags_for_watch(uuid=uuid)
+           if tag_limit and not any(v.get('title').lower() == tag_limit for k, v in tags.items()):
+               continue
+
+           list[uuid] = {'url': watch['url'],
                      'title': watch['title'],
                      'last_checked': watch['last_checked'],
                      'last_changed': watch.last_changed,
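A hedged usage sketch of the API behaviour in the hunks above: creating a watch still uses the old 'tag' key (converted to the new 'tags' list internally) and the list endpoint filters by tag title. The base URL and API key are placeholders, and the exact endpoint paths are assumptions inferred from the resources shown:

    import requests

    BASE = "http://localhost:4000/api/v1"        # assumed local instance, port as in the curl example above
    HEADERS = {"x-api-key": "YOUR_API_KEY"}      # placeholder key

    # Create a watch with a tag (the v1 API keeps the old 'tag' field name).
    r = requests.post(f"{BASE}/watch",
                      json={"url": "https://example.com", "tag": "shopping"},
                      headers=HEADERS)
    print(r.status_code, r.json())               # expect 201 and {'uuid': ...}

    # List only watches whose tag title matches (case-insensitive).
    r = requests.get(f"{BASE}/watch", params={"tag": "shopping"}, headers=HEADERS)
    print(r.json())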
116  changedetectionio/blueprint/check_proxies/__init__.py  (new file)

@@ -0,0 +1,116 @@
from concurrent.futures import ThreadPoolExecutor

from functools import wraps

from flask import Blueprint
from flask_login import login_required

from changedetectionio.processors import text_json_diff
from changedetectionio.store import ChangeDetectionStore


STATUS_CHECKING = 0
STATUS_FAILED = 1
STATUS_OK = 2
THREADPOOL_MAX_WORKERS = 3
_DEFAULT_POOL = ThreadPoolExecutor(max_workers=THREADPOOL_MAX_WORKERS)


# Maybe use fetch-time if its >5 to show some expected load time?
def threadpool(f, executor=None):
    @wraps(f)
    def wrap(*args, **kwargs):
        return (executor or _DEFAULT_POOL).submit(f, *args, **kwargs)

    return wrap


def construct_blueprint(datastore: ChangeDetectionStore):
    check_proxies_blueprint = Blueprint('check_proxies', __name__)
    checks_in_progress = {}

    @threadpool
    def long_task(uuid, preferred_proxy):
        import time
        from changedetectionio import content_fetcher

        status = {'status': '', 'length': 0, 'text': ''}
        from jinja2 import Environment, BaseLoader

        contents = ''
        now = time.time()
        try:
            update_handler = text_json_diff.perform_site_check(datastore=datastore)
            changed_detected, update_obj, contents = update_handler.run(uuid, preferred_proxy=preferred_proxy, skip_when_checksum_same=False)
            # title, size is len contents not len xfer
        except content_fetcher.Non200ErrorCodeReceived as e:
            if e.status_code == 404:
                status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but 404 (page not found)"})
            elif e.status_code == 403 or e.status_code == 401:
                status.update({'status': 'ERROR', 'length': len(contents), 'text': f"{e.status_code} - Access denied"})
            else:
                status.update({'status': 'ERROR', 'length': len(contents), 'text': f"Status code: {e.status_code}"})
        except text_json_diff.FilterNotFoundInResponse:
            status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but CSS/xPath filter not found (page changed layout?)"})
        except content_fetcher.EmptyReply as e:
            if e.status_code == 403 or e.status_code == 401:
                status.update({'status': 'ERROR OTHER', 'length': len(contents), 'text': f"Got empty reply with code {e.status_code} - Access denied"})
            else:
                status.update({'status': 'ERROR OTHER', 'length': len(contents) if contents else 0, 'text': f"Empty reply with code {e.status_code}, needs chrome?"})

        except Exception as e:
            status.update({'status': 'ERROR OTHER', 'length': len(contents) if contents else 0, 'text': 'Error: '+str(e)})
        else:
            status.update({'status': 'OK', 'length': len(contents), 'text': ''})

        if status.get('text'):
            status['text'] = Environment(loader=BaseLoader()).from_string('{{text|e}}').render({'text': status['text']})

        status['time'] = "{:.2f}s".format(time.time() - now)

        return status

    def _recalc_check_status(uuid):

        results = {}
        for k, v in checks_in_progress.get(uuid, {}).items():
            try:
                r_1 = v.result(timeout=0.05)
            except Exception as e:
                # If timeout error?
                results[k] = {'status': 'RUNNING'}

            else:
                results[k] = r_1

        return results

    @login_required
    @check_proxies_blueprint.route("/<string:uuid>/status", methods=['GET'])
    def get_recheck_status(uuid):
        results = _recalc_check_status(uuid=uuid)
        return results

    @login_required
    @check_proxies_blueprint.route("/<string:uuid>/start", methods=['GET'])
    def start_check(uuid):

        if not datastore.proxy_list:
            return

        if checks_in_progress.get(uuid):
            state = _recalc_check_status(uuid=uuid)
            for proxy_key, v in state.items():
                if v.get('status') == 'RUNNING':
                    return state
        else:
            checks_in_progress[uuid] = {}

        for k, v in datastore.proxy_list.items():
            if not checks_in_progress[uuid].get(k):
                checks_in_progress[uuid][k] = long_task(uuid=uuid, preferred_proxy=k)

        results = _recalc_check_status(uuid=uuid)
        return results

    return check_proxies_blueprint
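A rough sketch of how the new blueprint could be exercised once it is mounted at /check_proxy (per the register_blueprint hunk earlier in this compare); the host, port and the authenticated session it may need are assumptions:

    import time
    import requests

    BASE = "http://localhost:4000/check_proxy"   # assumed instance URL; prefix taken from the registration hunk
    watch_uuid = "cc0cfffa-f449-477b-83ea-0caafd1dc091"   # example UUID reused from the API docs above

    session = requests.Session()                 # a logged-in session cookie may be required (login_required)

    # Kick off a check of every configured proxy against this watch.
    session.get(f"{BASE}/{watch_uuid}/start")

    # Poll until no proxy is still RUNNING, then print the per-proxy results.
    while True:
        status = session.get(f"{BASE}/{watch_uuid}/status").json()
        if all(v.get('status') != 'RUNNING' for v in status.values()):
            break
        time.sleep(1)

    print(status)   # e.g. {'proxy-one': {'status': 'OK', 'length': 12345, 'text': '', 'time': '1.23s'}}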
9  changedetectionio/blueprint/tags/README.md  (new file)

@@ -0,0 +1,9 @@
# Groups tags

## How it works

Watch has a list() of tag UUID's, which relate to a config under application.settings.tags

The 'tag' is actually a watch, because they basically will eventually share 90% of the same config.

So a tag is like an abstract of a watch
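As the README above describes, a watch carries a list of tag UUIDs that point into application.settings.tags; a minimal sketch of that shape (all keys and values below are illustrative, not taken from a real datastore):

    datastore_data = {
        "settings": {
            "application": {
                "tags": {
                    # keyed by tag UUID; each tag holds watch-like settings (filters, notifications, ...)
                    "0b95800c-example-tag-uuid": {"title": "shopping", "notification_muted": False},
                },
            },
        },
        "watching": {
            "cc0cfffa-example-watch-uuid": {
                "url": "https://example.com",
                "tags": ["0b95800c-example-tag-uuid"],   # references into settings.application.tags
            },
        },
    }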
141  changedetectionio/blueprint/tags/__init__.py  (new file)

@@ -0,0 +1,141 @@
from flask import Blueprint, request, make_response, render_template, flash, url_for, redirect
from changedetectionio.store import ChangeDetectionStore
from changedetectionio import login_optionally_required


def construct_blueprint(datastore: ChangeDetectionStore):
    tags_blueprint = Blueprint('tags', __name__, template_folder="templates")

    @tags_blueprint.route("/list", methods=['GET'])
    @login_optionally_required
    def tags_overview_page():
        from .form import SingleTag
        add_form = SingleTag(request.form)
        output = render_template("groups-overview.html",
                                 form=add_form,
                                 available_tags=datastore.data['settings']['application'].get('tags', {}),
                                 )

        return output

    @tags_blueprint.route("/add", methods=['POST'])
    @login_optionally_required
    def form_tag_add():
        from .form import SingleTag
        add_form = SingleTag(request.form)

        if not add_form.validate():
            for widget, l in add_form.errors.items():
                flash(','.join(l), 'error')
            return redirect(url_for('tags.tags_overview_page'))

        title = request.form.get('name').strip()

        if datastore.tag_exists_by_name(title):
            flash(f'The tag "{title}" already exists', "error")
            return redirect(url_for('tags.tags_overview_page'))

        datastore.add_tag(title)
        flash("Tag added")

        return redirect(url_for('tags.tags_overview_page'))

    @tags_blueprint.route("/mute/<string:uuid>", methods=['GET'])
    @login_optionally_required
    def mute(uuid):
        if datastore.data['settings']['application']['tags'].get(uuid):
            datastore.data['settings']['application']['tags'][uuid]['notification_muted'] = not datastore.data['settings']['application']['tags'][uuid]['notification_muted']
        return redirect(url_for('tags.tags_overview_page'))

    @tags_blueprint.route("/delete/<string:uuid>", methods=['GET'])
    @login_optionally_required
    def delete(uuid):
        removed = 0
        # Delete the tag, and any tag reference
        if datastore.data['settings']['application']['tags'].get(uuid):
            del datastore.data['settings']['application']['tags'][uuid]

        for watch_uuid, watch in datastore.data['watching'].items():
            if watch.get('tags') and uuid in watch['tags']:
                removed += 1
                watch['tags'].remove(uuid)

        flash(f"Tag deleted and removed from {removed} watches")
        return redirect(url_for('tags.tags_overview_page'))

    @tags_blueprint.route("/unlink/<string:uuid>", methods=['GET'])
    @login_optionally_required
    def unlink(uuid):
        unlinked = 0
        for watch_uuid, watch in datastore.data['watching'].items():
            if watch.get('tags') and uuid in watch['tags']:
                unlinked += 1
                watch['tags'].remove(uuid)

        flash(f"Tag unlinked removed from {unlinked} watches")
        return redirect(url_for('tags.tags_overview_page'))

    @tags_blueprint.route("/delete_all", methods=['GET'])
    @login_optionally_required
    def delete_all():
        for watch_uuid, watch in datastore.data['watching'].items():
            watch['tags'] = []
        datastore.data['settings']['application']['tags'] = {}

        flash(f"All tags deleted")
        return redirect(url_for('tags.tags_overview_page'))

    @tags_blueprint.route("/edit/<string:uuid>", methods=['GET'])
    @login_optionally_required
    def form_tag_edit(uuid):
        from changedetectionio import forms

        if uuid == 'first':
            uuid = list(datastore.data['settings']['application']['tags'].keys()).pop()

        default = datastore.data['settings']['application']['tags'].get(uuid)

        form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
                               data=default,
                               )
        form.datastore=datastore # needed?

        output = render_template("edit-tag.html",
                                 data=default,
                                 form=form,
                                 settings_application=datastore.data['settings']['application'],
                                 )

        return output


    @tags_blueprint.route("/edit/<string:uuid>", methods=['POST'])
    @login_optionally_required
    def form_tag_edit_submit(uuid):
        from changedetectionio import forms
        if uuid == 'first':
            uuid = list(datastore.data['settings']['application']['tags'].keys()).pop()

        default = datastore.data['settings']['application']['tags'].get(uuid)

        form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
                               data=default,
                               )
        # @todo subclass form so validation works
        #if not form.validate():
        #    for widget, l in form.errors.items():
        #        flash(','.join(l), 'error')
        #    return redirect(url_for('tags.form_tag_edit_submit', uuid=uuid))

        datastore.data['settings']['application']['tags'][uuid].update(form.data)
        datastore.needs_write_urgent = True
        flash("Updated")

        return redirect(url_for('tags.tags_overview_page'))


    @tags_blueprint.route("/delete/<string:uuid>", methods=['GET'])
    def form_tag_delete(uuid):
        return redirect(url_for('tags.tags_overview_page'))
    return tags_blueprint
22  changedetectionio/blueprint/tags/form.py  (new file)

@@ -0,0 +1,22 @@
from wtforms import (
    BooleanField,
    Form,
    IntegerField,
    RadioField,
    SelectField,
    StringField,
    SubmitField,
    TextAreaField,
    validators,
)


class SingleTag(Form):

    name = StringField('Tag name', [validators.InputRequired()], render_kw={"placeholder": "Name"})
    save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
131  changedetectionio/blueprint/tags/templates/edit-tag.html  (new file)

@@ -0,0 +1,131 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
{% from '_common_fields.jinja' import render_common_settings_form %}
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
<script>

/*{% if emailprefix %}*/
/*const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');*/
/*{% endif %}*/

</script>

<script src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script>
<!--<script src="{{url_for('static_content', group='js', filename='limit.js')}}" defer></script>-->
<script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>

<div class="edit-form monospaced-textarea">

<div class="tabs collapsable">
<ul>
<li class="tab" id=""><a href="#general">General</a></li>
<li class="tab"><a href="#filters-and-triggers">Filters & Triggers</a></li>
<li class="tab"><a href="#notifications">Notifications</a></li>
</ul>
</div>

<div class="box-wrap inner">
<form class="pure-form pure-form-stacked"
action="{{ url_for('tags.form_tag_edit', uuid=data.uuid) }}" method="POST">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">

<div class="tab-pane-inner" id="general">
<fieldset>
<div class="pure-control-group">
{{ render_field(form.title, placeholder="https://...", required=true, class="m-d") }}
</div>
</fieldset>
</div>

<div class="tab-pane-inner" id="filters-and-triggers">
<div class="pure-control-group">
{% set field = render_field(form.include_filters,
rows=5,
placeholder="#example
xpath://body/div/span[contains(@class, 'example-class')]",
class="m-d")
%}
{{ field }}
{% if '/text()' in field %}
<span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the <element> contains <![CDATA[]]></strong></span><br>
{% endif %}
<span class="pure-form-message-inline">One rule per line, <i>any</i> rules that matches will be used.<br>

<ul>
<li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
<li>JSON - Limit text to this JSON rule, using either <a href="https://pypi.org/project/jsonpath-ng/" target="new">JSONPath</a> or <a href="https://stedolan.github.io/jq/" target="new">jq</a> (if installed).
<ul>
<li>JSONPath: Prefix with <code>json:</code>, use <code>json:$</code> to force re-formatting if required, <a href="https://jsonpath.com/" target="new">test your JSONPath here</a>.</li>
{% if jq_support %}
<li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>.</li>
{% else %}
<li>jq support not installed</li>
{% endif %}
</ul>
</li>
<li>XPath - Limit text to this XPath rule, simply start with a forward-slash,
<ul>
<li>Example: <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
href="http://xpather.com/" target="new">test your XPath here</a></li>
<li>Example: Get all titles from an RSS feed <code>//title/text()</code></li>
</ul>
</li>
</ul>
Please be sure that you thoroughly understand how to write CSS, JSONPath, XPath{% if jq_support %}, or jq selector{%endif%} rules before filing an issue on GitHub! <a
href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br>
</span>
</div>
<fieldset class="pure-control-group">
{{ render_field(form.subtractive_selectors, rows=5, placeholder="header
footer
nav
.stockticker") }}
<span class="pure-form-message-inline">
<ul>
<li> Remove HTML element(s) by CSS selector before text conversion. </li>
<li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li>
</ul>
</span>
</fieldset>

</div>

<div class="tab-pane-inner" id="notifications">
<fieldset>
<div class="pure-control-group inline-radio">
{{ render_checkbox_field(form.notification_muted) }}
</div>
{% if is_html_webdriver %}
<div class="pure-control-group inline-radio">
{{ render_checkbox_field(form.notification_screenshot) }}
<span class="pure-form-message-inline">
<strong>Use with caution!</strong> This will easily fill up your email storage quota or flood other storages.
</span>
</div>
{% endif %}
<div class="field-group" id="notification-field-group">
{% if has_default_notification_urls %}
<div class="inline-warning">
<img class="inline-warning-icon" src="{{url_for('static_content', group='images', filename='notice.svg')}}" alt="Look out!" title="Lookout!" >
There are <a href="{{ url_for('settings_page')}}#notifications">system-wide notification URLs enabled</a>, this form will override notification settings for this watch only ‐ an empty Notification URL list here will still send notifications.
</div>
{% endif %}
<a href="#notifications" id="notification-setting-reset-to-default" class="pure-button button-xsmall" style="right: 20px; top: 20px; position: absolute; background-color: #5f42dd; border-radius: 4px; font-size: 70%; color: #fff">Use system defaults</a>

{{ render_common_settings_form(form, emailprefix, settings_application) }}
</div>
</fieldset>
</div>

<div id="actions">
<div class="pure-control-group">
{{ render_button(form.save_button) }}
</div>
</div>
</form>
</div>
</div>

{% endblock %}
@@ -0,0 +1,60 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.jinja' import render_simple_field, render_field %}
<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>

<div class="box">
<form class="pure-form" action="{{ url_for('tags.form_tag_add') }}" method="POST" id="new-watch-form">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
<fieldset>
<legend>Add a new organisational tag</legend>
<div id="watch-add-wrapper-zone">
<div>
{{ render_simple_field(form.name, placeholder="watch label / tag") }}
</div>
<div>
{{ render_simple_field(form.save_button, title="Save" ) }}
</div>
</div>
<br>
<div style="color: #fff;">Groups allows you to manage filters and notifications for multiple watches under a single organisational tag.</div>
</fieldset>
</form>
<!-- @todo maybe some overview matrix, 'tick' with which has notification, filter rules etc -->
<div id="watch-table-wrapper">

<table class="pure-table pure-table-striped watch-table group-overview-table">
<thead>
<tr>
<th></th>
<th>Tag / Label name</th>
<th></th>
</tr>
</thead>
<tbody>
<!--
@Todo - connect Last checked, Last Changed, Number of Watches etc
--->
{% if not available_tags|length %}
<tr>
<td colspan="3">No website organisational tags/groups configured</td>
</tr>
{% endif %}
{% for uuid, tag in available_tags.items() %}
<tr id="{{ uuid }}" class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }}">
<td class="watch-controls">
<a class="link-mute state-{{'on' if tag.notification_muted else 'off'}}" href="{{url_for('tags.mute', uuid=tag.uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications" class="icon icon-mute" ></a>
</td>
<td class="title-col inline">{{tag.title}}</td>
<td>
<a class="pure-button pure-button-primary" href="{{ url_for('tags.form_tag_edit', uuid=uuid) }}">Edit</a>
<a class="pure-button pure-button-primary" href="{{ url_for('tags.delete', uuid=uuid) }}" title="Deletes and removes tag">Delete</a>
<a class="pure-button pure-button-primary" href="{{ url_for('tags.unlink', uuid=uuid) }}" title="Keep the tag but unlink any watches">Unlink</a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
{% endblock %}
@@ -201,7 +201,8 @@ class Fetcher():
        dest = os.path.join(self.browser_steps_screenshot_path, 'step_*.jpeg')
        files = glob.glob(dest)
        for f in files:
-           os.unlink(f)
+           if os.path.isfile(f):
+               os.unlink(f)

    # Maybe for the future, each fetcher provides its own diff output, could be used for text, image
@@ -54,4 +54,5 @@ def render_diff(previous_version_file_contents, newest_version_file_contents, in

    # Recursively join lists
    f = lambda L: line_feed_sep.join([f(x) if type(x) is list else x for x in L])
-   return f(rendered_diff)
+   p= f(rendered_diff)
+   return p
@@ -1,5 +1,6 @@
 import os
 import re
+from distutils.util import strtobool

 from wtforms import (
     BooleanField,
@@ -28,6 +29,8 @@ from changedetectionio.notification import (

 from wtforms.fields import FormField

+dictfilt = lambda x, y: dict([ (i,x[i]) for i in x if i in set(y) ])
+
 valid_method = {
     'GET',
     'POST',
@@ -90,6 +93,29 @@ class SaltyPasswordField(StringField):
        else:
            self.data = False

+class StringTagUUID(StringField):
+
+   # process_formdata(self, valuelist) handled manually in POST handler
+
+   # Is what is shown when field <input> is rendered
+   def _value(self):
+       # Tag UUID to name, on submit it will convert it back (in the submit handler of init.py)
+       if self.data and type(self.data) is list:
+           tag_titles = []
+           for i in self.data:
+               tag = self.datastore.data['settings']['application']['tags'].get(i)
+               if tag:
+                   tag_title = tag.get('title')
+                   if tag_title:
+                       tag_titles.append(tag_title)
+
+           return ', '.join(tag_titles)
+
+       if not self.data:
+           return ''
+
+       return 'error'
+
 class TimeBetweenCheckForm(Form):
    weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
    days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
@@ -232,9 +258,10 @@ class validateURL(object):

    def __call__(self, form, field):
        import validators
+       # If hosts that only contain alphanumerics are allowed ("localhost" for example)
+       allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
        try:
-           validators.url(field.data.strip())
+           validators.url(field.data.strip(), simple_host=allow_simplehost)
        except validators.ValidationFailure:
            message = field.gettext('\'%s\' is not a valid URL.' % (field.data.strip()))
            raise ValidationError(message)
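The same BLOCK_SIMPLEHOSTS toggle is used by both the API and this form validator; a small sketch of the intended behaviour, assuming the validators package imported above accepts the simple_host keyword shown in the diff:

    import os
    from distutils.util import strtobool

    import validators

    # When BLOCK_SIMPLEHOSTS is unset or false, bare hostnames such as "localhost" stay valid.
    allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))

    print(validators.url("http://localhost:8080", simple_host=allow_simplehost))   # truthy when allowed
    print(validators.url("https://example.com", simple_host=allow_simplehost))     # truthy either way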
@@ -347,7 +374,7 @@ class quickWatchForm(Form):
    from . import processors

    url = fields.URLField('URL', validators=[validateURL()])
-   tag = StringField('Group tag', [validators.Optional()])
+   tags = StringTagUUID('Group tag', [validators.Optional()])
    watch_submit_button = SubmitField('Watch', render_kw={"class": "pure-button pure-button-primary"})
    processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff")
    edit_and_watch_submit_button = SubmitField('Edit > Watch', render_kw={"class": "pure-button pure-button-primary"})
@@ -355,6 +382,7 @@ class quickWatchForm(Form):
|
|||||||
|
|
||||||
# Common to a single watch and the global settings
|
# Common to a single watch and the global settings
|
||||||
class commonSettingsForm(Form):
|
class commonSettingsForm(Form):
|
||||||
|
|
||||||
notification_urls = StringListField('Notification URL List', validators=[validators.Optional(), ValidateAppRiseServers()])
|
notification_urls = StringListField('Notification URL List', validators=[validators.Optional(), ValidateAppRiseServers()])
|
||||||
notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()])
|
notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()])
|
||||||
notification_body = TextAreaField('Notification Body', default='{{ watch_url }} had a change.', validators=[validators.Optional(), ValidateJinja2Template()])
|
notification_body = TextAreaField('Notification Body', default='{{ watch_url }} had a change.', validators=[validators.Optional(), ValidateJinja2Template()])
|
||||||
@@ -382,7 +410,7 @@ class SingleBrowserStep(Form):
|
|||||||
class watchForm(commonSettingsForm):
|
class watchForm(commonSettingsForm):
|
||||||
|
|
||||||
url = fields.URLField('URL', validators=[validateURL()])
|
url = fields.URLField('URL', validators=[validateURL()])
|
||||||
tag = StringField('Group tag', [validators.Optional()], default='')
|
tags = StringTagUUID('Group tag', [validators.Optional()], default='')
|
||||||
|
|
||||||
time_between_check = FormField(TimeBetweenCheckForm)
|
time_between_check = FormField(TimeBetweenCheckForm)
|
||||||
|
|
||||||
|
|||||||
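A minimal sketch of what the new StringTagUUID field does at render time, stripped of the WTForms plumbing: stored tag UUIDs are mapped back to their titles through the datastore. The tags dict below is a hypothetical stand-in for datastore.data['settings']['application']['tags'].

    # Hypothetical stand-in for datastore.data['settings']['application']['tags']
    tags = {
        'a1b2': {'title': 'shopping'},
        'c3d4': {'title': 'news'},
    }

    def render_tag_field(tag_uuids):
        # Mirrors StringTagUUID._value(): UUIDs in, comma-separated titles out
        if tag_uuids and isinstance(tag_uuids, list):
            titles = [tags[u]['title'] for u in tag_uuids if u in tags and tags[u].get('title')]
            return ', '.join(titles)
        return '' if not tag_uuids else 'error'

    print(render_tag_field(['a1b2', 'c3d4']))  # -> shopping, news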
@@ -85,7 +85,8 @@ class import_distill_io_json(Importer):
         now = time.time()
         self.new_uuids=[]
 
+        # @todo Use JSONSchema like in the API to validate here.
 
         try:
             data = json.loads(data.strip())
         except json.decoder.JSONDecodeError:
@@ -120,11 +121,8 @@ class import_distill_io_json(Importer):
             except IndexError:
                 pass
 
-            if d.get('tags', False):
-                extras['tag'] = ", ".join(d['tags'])
-
             new_uuid = datastore.add_watch(url=d['uri'].strip(),
+                                           tag=",".join(d.get('tags', [])),
                                            extras=extras,
                                            write_to_disk_now=False)
 
@@ -43,6 +43,7 @@ class model(dict):
             'schema_version' : 0,
             'shared_diff_access': False,
             'webdriver_delay': None , # Extra delay in seconds before extracting text
+            'tags': {} #@todo use Tag.model initialisers
         }
     }
 }
changedetectionio/model/Tag.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+from .Watch import base_config
+import uuid
+
+class model(dict):
+
+    def __init__(self, *arg, **kw):
+
+        self.update(base_config)
+
+        self['uuid'] = str(uuid.uuid4())
+
+        if kw.get('default'):
+            self.update(kw['default'])
+            del kw['default']
+
+
+        # Goes at the end so we update the default object with the initialiser
+        super(model, self).__init__(*arg, **kw)
+
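A small usage sketch of the new Tag model, assuming it can be imported as changedetectionio.model.Tag; the keyword shape follows the add_tag() hunk in the datastore further down, so treat the exact call site as illustrative.

    import time
    from changedetectionio.model import Tag   # assumed import path

    # Tags reuse the Watch base_config, then get their own UUID and overrides
    new_tag = Tag.model(default={
        'title': 'shopping',
        'date_created': int(time.time()),
    })
    print(new_tag['uuid'], new_tag['title'])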
@@ -52,7 +52,8 @@ base_config = {
     'previous_md5_before_filters': False, # Used for skipping changedetection entirely
     'proxy': None, # Preferred proxy connection
     'subtractive_selectors': [],
-    'tag': None,
+    'tag': '', # Old system of text name for a tag, to be removed
+    'tags': [], # list of UUIDs to App.Tags
     'text_should_not_be_present': [], # Text that should not present
     # Re #110, so then if this is set to None, we know to use the default value instead
     # Requires setting to None on submit if it's the same as the default
@@ -455,10 +456,6 @@ class model(dict):
 
         return csv_output_filename
 
-    @property
-    # Return list of tags, stripped and lowercase, used for searching
-    def all_tags(self):
-        return [s.strip().lower() for s in self.get('tag','').split(',')]
-
     def has_special_diff_filter_options_set(self):
 
@@ -473,40 +470,6 @@ class model(dict):
             # None is set
         return False
 
-    @property
-    def has_extra_headers_file(self):
-        if os.path.isfile(os.path.join(self.watch_data_dir, 'headers.txt')):
-            return True
-
-        for f in self.all_tags:
-            fname = "headers-"+re.sub(r'[\W_]', '', f).lower().strip() + ".txt"
-            filepath = os.path.join(self.__datastore_path, fname)
-            if os.path.isfile(filepath):
-                return True
-
-        return False
-
-    def get_all_headers(self):
-        from .App import parse_headers_from_text_file
-        headers = self.get('headers', {}).copy()
-        # Available headers on the disk could 'headers.txt' in the watch data dir
-        filepath = os.path.join(self.watch_data_dir, 'headers.txt')
-        try:
-            if os.path.isfile(filepath):
-                headers.update(parse_headers_from_text_file(filepath))
-        except Exception as e:
-            print(f"ERROR reading headers.txt at {filepath}", str(e))
-
-        # Or each by tag, as tagname.txt in the main datadir
-        for f in self.all_tags:
-            fname = "headers-"+re.sub(r'[\W_]', '', f).lower().strip() + ".txt"
-            filepath = os.path.join(self.__datastore_path, fname)
-            try:
-                if os.path.isfile(filepath):
-                    headers.update(parse_headers_from_text_file(filepath))
-            except Exception as e:
-                print(f"ERROR reading headers.txt at {filepath}", str(e))
-        return headers
-
     def get_last_fetched_before_filters(self):
         import brotli
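The per-tag headers file convention removed from the Watch model here reappears in the store (see get_all_headers_in_textfile_for_watch below); a short sketch of the filename mapping it relies on, using an illustrative datastore path:

    import os
    import re

    def headers_filename_for_tag(datastore_path, tag_title):
        # e.g. "Kitchen & Home" -> headers-kitchenhome.txt in the datastore root
        fname = "headers-" + re.sub(r'[\W_]', '', tag_title).lower().strip() + ".txt"
        return os.path.join(datastore_path, fname)

    print(headers_filename_for_tag('/datastore', 'Kitchen & Home'))
    # /datastore/headers-kitchenhome.txt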
@@ -92,6 +92,12 @@ def process_notification(n_object, datastore):
         n_object.get('notification_format', default_notification_format),
         valid_notification_formats[default_notification_format],
     )
 
+    # If we arrived with 'System default' then look it up
+    if n_format == default_notification_format_for_watch and datastore.data['settings']['application'].get('notification_format') != default_notification_format_for_watch:
+        # Initially text or whatever
+        n_format = datastore.data['settings']['application'].get('notification_format', valid_notification_formats[default_notification_format])
+
     # https://github.com/caronc/apprise/wiki/Development_LogCapture
     # Anything higher than or equal to WARNING (which covers things like Connection errors)
@@ -145,9 +151,12 @@ def process_notification(n_object, datastore):
         # Apprise will default to HTML, so we need to override it
         # So that whats' generated in n_body is in line with what is going to be sent.
         # https://github.com/caronc/apprise/issues/633#issuecomment-1191449321
-        if not 'format=' in url and (n_format == 'text' or n_format == 'markdown'):
+        if not 'format=' in url and (n_format == 'Text' or n_format == 'Markdown'):
             prefix = '?' if not '?' in url else '&'
+            # Apprise format is lowercase text https://github.com/caronc/apprise/issues/633
+            n_format = n_format.tolower()
             url = "{}{}format={}".format(url, prefix, n_format)
+            # If n_format == HTML, then apprise email should default to text/html and we should be sending HTML only
 
         apobj.add(url)
 
@@ -186,8 +195,13 @@ def create_notification_parameters(n_object, datastore):
     uuid = n_object['uuid'] if 'uuid' in n_object else ''
 
     if uuid != '':
-        watch_title = datastore.data['watching'][uuid]['title']
-        watch_tag = datastore.data['watching'][uuid]['tag']
+        watch_title = datastore.data['watching'][uuid].get('title', '')
+        tag_list = []
+        tags = datastore.get_all_tags_for_watch(uuid)
+        if tags:
+            for tag_uuid, tag in tags.items():
+                tag_list.append(tag.get('title'))
+        watch_tag = ', '.join(tag_list)
     else:
         watch_title = 'Change Detection'
         watch_tag = ''
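For orientation, a hedged sketch of what the format override in the hunk above produces for an Apprise notification URL; the function name and URL are illustrative, not the project's API:

    def with_apprise_format(url, n_format):
        # Append ?format=... / &format=... unless the URL already pins a format
        if 'format=' not in url and n_format.lower() in ('text', 'markdown'):
            prefix = '?' if '?' not in url else '&'
            url = "{}{}format={}".format(url, prefix, n_format.lower())
        return url

    print(with_apprise_format('mailtos://user:pass@example.com?to=me@example.com', 'Text'))
    # mailtos://user:pass@example.com?to=me@example.com&format=text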
@@ -9,7 +9,7 @@ class difference_detection_processor():
         super().__init__(*args, **kwargs)
 
     @abstractmethod
-    def run(self, uuid, skip_when_checksum_same=True):
+    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
         update_obj = {'last_notification_error': False, 'last_error': False}
         some_data = 'xxxxx'
         update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest()
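The new preferred_proxy argument lets a caller force a specific proxy for a single check instead of the watch's stored preference (the proxy-check blueprint in this branch appears to be that caller). A hypothetical invocation; the constructor and return shape are assumptions based on how processors are used elsewhere in the project:

    from changedetectionio.processors import text_json_diff

    update_handler = text_json_diff.perform_site_check(datastore=datastore)  # 'datastore' assumed to exist
    # Route this one run through "proxy-two" regardless of the watch's configured proxy
    changed_detected, update_obj, text_after_filter = update_handler.run(watch_uuid, preferred_proxy="proxy-two")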
@@ -42,11 +42,10 @@ class perform_site_check(difference_detection_processor):
 
         # Unset any existing notification error
         update_obj = {'last_notification_error': False, 'last_error': False}
-        extra_headers = watch.get('headers', [])
 
-        # Tweak the base config with the per-watch ones
-        request_headers = deepcopy(self.datastore.data['settings']['headers'])
-        request_headers.update(extra_headers)
+        request_headers = watch.get('headers', [])
+        request_headers.update(self.datastore.get_all_base_headers())
+        request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=uuid))
 
         # https://github.com/psf/requests/issues/4525
         # Requests doesnt yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
@@ -50,14 +50,13 @@ class perform_site_check(difference_detection_processor):
 
         return regex
 
-    def run(self, uuid, skip_when_checksum_same=True):
+    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
         changed_detected = False
         screenshot = False # as bytes
         stripped_text_from_html = ""
 
         # DeepCopy so we can be sure we don't accidently change anything by reference
         watch = deepcopy(self.datastore.data['watching'].get(uuid))
 
         if not watch:
             raise Exception("Watch no longer exists.")
 
@@ -71,9 +70,9 @@ class perform_site_check(difference_detection_processor):
         update_obj = {'last_notification_error': False, 'last_error': False}
 
         # Tweak the base config with the per-watch ones
-        extra_headers = watch.get_all_headers()
-        request_headers = self.datastore.get_all_headers()
-        request_headers.update(extra_headers)
+        request_headers = watch.get('headers', [])
+        request_headers.update(self.datastore.get_all_base_headers())
+        request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=uuid))
 
         # https://github.com/psf/requests/issues/4525
         # Requests doesnt yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
@@ -106,7 +105,11 @@ class perform_site_check(difference_detection_processor):
             # If the klass doesnt exist, just use a default
             klass = getattr(content_fetcher, "html_requests")
 
-        proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
+        if preferred_proxy:
+            proxy_id = preferred_proxy
+        else:
+            proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
+
         proxy_url = None
         if proxy_id:
             proxy_url = self.datastore.proxy_list.get(proxy_id).get('url')
@@ -191,21 +194,23 @@ class perform_site_check(difference_detection_processor):
 
             fetcher.content = fetcher.content.replace('</body>', metadata + '</body>')
 
-        include_filters_rule = deepcopy(watch.get('include_filters', []))
-        # include_filters_rule = watch['include_filters']
-        subtractive_selectors = watch.get(
-            "subtractive_selectors", []
-        ) + self.datastore.data["settings"]["application"].get(
-            "global_subtractive_selectors", []
-        )
+        # Better would be if Watch.model could access the global data also
+        # and then use getattr https://docs.python.org/3/reference/datamodel.html#object.__getitem__
+        # https://realpython.com/inherit-python-dict/ instead of doing it procedurely
+        include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='include_filters')
+        include_filters_rule = [*watch.get('include_filters', []), *include_filters_from_tags]
+
+        subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='subtractive_selectors'),
+                                 *watch.get("subtractive_selectors", []),
+                                 *self.datastore.data["settings"]["application"].get("global_subtractive_selectors", [])
+                                 ]
 
         # Inject a virtual LD+JSON price tracker rule
         if watch.get('track_ldjson_price_data', '') == PRICE_DATA_TRACK_ACCEPT:
             include_filters_rule.append(html_tools.LD_JSON_PRODUCT_OFFER_SELECTOR)
 
-        has_filter_rule = include_filters_rule and len("".join(include_filters_rule).strip())
-        has_subtractive_selectors = subtractive_selectors and len(subtractive_selectors[0].strip())
+        has_filter_rule = len(include_filters_rule) and len(include_filters_rule[0].strip())
+        has_subtractive_selectors = len(subtractive_selectors) and len(subtractive_selectors[0].strip())
 
         if is_json and not has_filter_rule:
             include_filters_rule.append("json:$")
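Given the filter hunk above, a tiny sketch of how tag-level overrides combine with a watch's own include filters (the dicts stand in for real datastore entries):

    watch = {'include_filters': ['.price']}
    include_filters_from_tags = ['.product-title']  # what get_tag_overrides_for_watch(..., attr='include_filters') would return

    include_filters_rule = [*watch.get('include_filters', []), *include_filters_from_tags]
    print(include_filters_rule)  # ['.price', '.product-title'] - tag filters are appended after the watch's own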
changedetectionio/static/js/recheck-proxy.js (new file, 87 lines)
@@ -0,0 +1,87 @@
+$(function () {
+    /* add container before each proxy location to show status */
+
+    var option_li = $('.fetch-backend-proxy li').filter(function() {
+        return $("input",this)[0].value.length >0;
+    });
+
+    //var option_li = $('.fetch-backend-proxy li');
+    var isActive = false;
+    $(option_li).prepend('<div class="proxy-status"></div>');
+    $(option_li).append('<div class="proxy-timing"></div><div class="proxy-check-details"></div>');
+
+    function set_proxy_check_status(proxy_key, state) {
+        // select input by value name
+        const proxy_li = $('input[value="' + proxy_key + '" ]').parent();
+        if (state['status'] === 'RUNNING') {
+            $('.proxy-status', proxy_li).html('<span class="spinner"></span>');
+        }
+        if (state['status'] === 'OK') {
+            $('.proxy-status', proxy_li).html('<span style="color: green; font-weight: bold" >OK</span>');
+            $('.proxy-check-details', proxy_li).html(state['text']);
+        }
+        if (state['status'] === 'ERROR' || state['status'] === 'ERROR OTHER') {
+            $('.proxy-status', proxy_li).html('<span style="color: red; font-weight: bold" >X</span>');
+            $('.proxy-check-details', proxy_li).html(state['text']);
+        }
+        $('.proxy-timing', proxy_li).html(state['time']);
+    }
+
+
+    function pollServer() {
+        if (isActive) {
+            window.setTimeout(function () {
+                $.ajax({
+                    url: proxy_recheck_status_url,
+                    success: function (data) {
+                        var all_done = true;
+                        $.each(data, function (proxy_key, state) {
+                            set_proxy_check_status(proxy_key, state);
+                            if (state['status'] === 'RUNNING') {
+                                all_done = false;
+                            }
+                        });
+
+                        if (all_done) {
+                            console.log("Shutting down poller, all done.")
+                            isActive = false;
+                        } else {
+                            pollServer();
+                        }
+                    },
+                    error: function () {
+                        //ERROR HANDLING
+                        pollServer();
+                    }
+                });
+            }, 2000);
+        }
+    }
+
+    $('#check-all-proxies').click(function (e) {
+        e.preventDefault()
+        $('body').addClass('proxy-check-active');
+        $('.proxy-check-details').html('');
+        $('.proxy-status').html('<span class="spinner"></span>').fadeIn();
+        $('.proxy-timing').html('');
+
+        // Request start, needs CSRF?
+        $.ajax({
+            type: "GET",
+            url: recheck_proxy_start_url,
+        }).done(function (data) {
+            $.each(data, function (proxy_key, state) {
+                set_proxy_check_status(proxy_key, state['status'])
+            });
+            isActive = true;
+            pollServer();
+
+        }).fail(function (data) {
+            console.log(data);
+            alert('There was an error communicating with the server.');
+        });
+
+    });
+
+});
+
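The poller above iterates a per-proxy status object; judging from the keys it reads, the JSON returned by proxy_recheck_status_url looks roughly like the sketch below (field values are illustrative, only 'status', 'text' and 'time' are taken from the JS):

    recheck_status = {
        "proxy-one": {"status": "OK", "text": "200 OK", "time": "1.2s"},
        "proxy-two": {"status": "RUNNING", "text": "", "time": ""},
    }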
@@ -9,13 +9,7 @@ $(document).ready(function () {
         const htmlElement = document.getElementsByTagName("html");
         const isDarkMode = htmlElement[0].dataset.darkmode === "true";
         htmlElement[0].dataset.darkmode = !isDarkMode;
-        if (isDarkMode) {
-            button.classList.remove("dark");
-            setCookieValue(false);
-        } else {
-            button.classList.add("dark");
-            setCookieValue(true);
-        }
+        setCookieValue(!isDarkMode);
     };
 
     const setCookieValue = (value) => {
@@ -1,31 +1,45 @@
 $(function () {
     // Remove unviewed status when normally clicked
     $('.diff-link').click(function () {
         $(this).closest('.unviewed').removeClass('unviewed');
     });
 
+    $("#checkbox-assign-tag").click(function (e) {
+        $('#op_extradata').val(prompt("Enter a tag name"));
+    });
+
     $('.with-share-link > *').click(function () {
         $("#copied-clipboard").remove();
 
         var range = document.createRange();
-        var n=$("#share-link")[0];
+        var n = $("#share-link")[0];
         range.selectNode(n);
         window.getSelection().removeAllRanges();
         window.getSelection().addRange(range);
         document.execCommand("copy");
         window.getSelection().removeAllRanges();
 
         $('.with-share-link').append('<span style="font-size: 80%; color: #fff;" id="copied-clipboard">Copied to clipboard</span>');
-        $("#copied-clipboard").fadeOut(2500, function() {
+        $("#copied-clipboard").fadeOut(2500, function () {
            $(this).remove();
        });
     });
 
+    $(".watch-table tr").click(function (event) {
+        var tagName = event.target.tagName.toLowerCase();
+        if (tagName === 'tr' || tagName === 'td') {
+            var x = $('input[type=checkbox]', this);
+            if (x) {
+                $(x).click();
+            }
+        }
+    });
+
     // checkboxes - check all
     $("#check-all").click(function (e) {
         $('input[type=checkbox]').not(this).prop('checked', this.checked);
     });
 
     // checkboxes - show/hide buttons
     $("input[type=checkbox]").click(function (e) {
         if ($('input[type=checkbox]:checked').length) {
changedetectionio/static/styles/scss/parts/_darkmode.scss (new file, 25 lines)
@@ -0,0 +1,25 @@
+
+#toggle-light-mode {
+  width: 3rem;
+  /* default */
+  .icon-dark {
+    display: none;
+  }
+
+}
+
+html[data-darkmode="true"] {
+  #toggle-light-mode {
+    .icon-light {
+      display: none;
+    }
+
+    .icon-dark {
+      display: block;
+    }
+  }
+}
+
+
+
+
@@ -7,6 +7,7 @@ ul#requests-extra_proxies {
   }
 
 }
 
 /* each proxy entry is a `table` */
 table {
   tr {
@@ -15,3 +16,30 @@ ul#requests-extra_proxies {
     }
   }
 }
 
+#request {
+  /* Auto proxy scan/checker */
+  label[for=proxy] {
+    display: inline-block;
+  }
+}
+
+body.proxy-check-active {
+  #request {
+    .proxy-status {
+      width: 2em;
+    }
+
+    .proxy-check-details {
+      font-size: 80%;
+      color: #555;
+      display: block;
+      padding-left: 4em;
+    }
+
+    .proxy-timing {
+      font-size: 80%;
+      padding-left: 1rem;
+      color: var(--color-link);
+    }
+  }
+}
@@ -8,6 +8,7 @@
 @import "parts/_pagination";
 @import "parts/_spinners";
 @import "parts/_variables";
+@import "parts/_darkmode";
 
 body {
   color: var(--color-text);
@@ -54,22 +55,6 @@ a.github-link {
   }
 }
 
-#toggle-light-mode {
-  width: 3rem;
-  .icon-dark {
-    display: none;
-  }
-
-  &.dark {
-    .icon-light {
-      display: none;
-    }
-
-    .icon-dark {
-      display: block;
-    }
-  }
-}
-
 #toggle-search {
   width: 2rem;
@@ -95,6 +95,25 @@ ul#requests-extra_proxies {
 ul#requests-extra_proxies table tr {
   display: inline; }
 
+#request {
+  /* Auto proxy scan/checker */ }
+  #request label[for=proxy] {
+    display: inline-block; }
+
+body.proxy-check-active #request .proxy-status {
+  width: 2em; }
+
+body.proxy-check-active #request .proxy-check-details {
+  font-size: 80%;
+  color: #555;
+  display: block;
+  padding-left: 4em; }
+
+body.proxy-check-active #request .proxy-timing {
+  font-size: 80%;
+  padding-left: 1rem;
+  color: var(--color-link); }
+
 .pagination-page-info {
   color: #fff;
   font-size: 0.85rem;
@@ -297,6 +316,18 @@ html[data-darkmode="true"] {
 html[data-darkmode="true"] .watch-table .unviewed.error {
   color: var(--color-watch-table-error); }
 
+#toggle-light-mode {
+  width: 3rem;
+  /* default */ }
+  #toggle-light-mode .icon-dark {
+    display: none; }
+
+html[data-darkmode="true"] #toggle-light-mode .icon-light {
+  display: none; }
+
+html[data-darkmode="true"] #toggle-light-mode .icon-dark {
+  display: block; }
+
 body {
   color: var(--color-text);
   background: var(--color-background-page); }
@@ -331,15 +362,6 @@ a.github-link {
 a.github-link:hover {
   color: var(--color-icon-github-hover); }
 
-#toggle-light-mode {
-  width: 3rem; }
-  #toggle-light-mode .icon-dark {
-    display: none; }
-  #toggle-light-mode.dark .icon-light {
-    display: none; }
-  #toggle-light-mode.dark .icon-dark {
-    display: block; }
-
 #toggle-search {
   width: 2rem; }
@@ -16,6 +16,8 @@ import threading
 import time
 import uuid as uuid_builder
 
+dictfilt = lambda x, y: dict([ (i,x[i]) for i in x if i in set(y) ])
+
 # Is there an existing library to ensure some data store (JSON etc) is in sync with CRUD methods?
 # Open a github issue if you know something :)
 # https://stackoverflow.com/questions/6190468/how-to-trigger-function-on-value-change
@@ -178,20 +180,6 @@ class ChangeDetectionStore:
 
         return self.__data
 
-    def get_all_tags(self):
-        tags = []
-        for uuid, watch in self.data['watching'].items():
-            if watch['tag'] is None:
-                continue
-            # Support for comma separated list of tags.
-            for tag in watch['tag'].split(','):
-                tag = tag.strip()
-                if tag not in tags:
-                    tags.append(tag)
-
-        tags.sort()
-        return tags
-
     # Delete a single watch by UUID
     def delete(self, uuid):
         import pathlib
@@ -204,22 +192,22 @@ class ChangeDetectionStore:
             # GitHub #30 also delete history records
             for uuid in self.data['watching']:
                 path = pathlib.Path(os.path.join(self.datastore_path, uuid))
-                shutil.rmtree(path)
-                self.needs_write_urgent = True
+                if os.path.exists(path):
+                    shutil.rmtree(path)
 
         else:
             path = pathlib.Path(os.path.join(self.datastore_path, uuid))
-            shutil.rmtree(path)
+            if os.path.exists(path):
+                shutil.rmtree(path)
             del self.data['watching'][uuid]
 
         self.needs_write_urgent = True
 
     # Clone a watch by UUID
     def clone(self, uuid):
-        url = self.data['watching'][uuid]['url']
-        tag = self.data['watching'][uuid]['tag']
+        url = self.data['watching'][uuid].get('url')
         extras = self.data['watching'][uuid]
-        new_uuid = self.add_watch(url=url, tag=tag, extras=extras)
+        new_uuid = self.add_watch(url=url, extras=extras)
         return new_uuid
 
     def url_exists(self, url):
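The dictfilt helper added at the top of this file (and in forms.py) is what get_all_tags_for_watch() later uses to keep only the tag entries a watch actually references; a quick demo:

    dictfilt = lambda x, y: dict([(i, x[i]) for i in x if i in set(y)])

    all_tags = {'a1': {'title': 'shopping'}, 'b2': {'title': 'news'}, 'c3': {'title': 'tools'}}
    print(dictfilt(all_tags, ['a1', 'c3']))
    # {'a1': {'title': 'shopping'}, 'c3': {'title': 'tools'}}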
@@ -254,16 +242,14 @@ class ChangeDetectionStore:
 
         self.needs_write_urgent = True
 
-    def add_watch(self, url, tag="", extras=None, write_to_disk_now=True):
+    def add_watch(self, url, tag='', extras=None, tag_uuids=None, write_to_disk_now=True):
 
         if extras is None:
             extras = {}
-        # should always be str
-        if tag is None or not tag:
-            tag = ''
 
         # Incase these are copied across, assume it's a reference and deepcopy()
         apply_extras = deepcopy(extras)
+        apply_extras['tags'] = [] if not apply_extras.get('tags') else apply_extras.get('tags')
 
         # Was it a share link? try to fetch the data
         if (url.startswith("https://changedetection.io/share/")):
@@ -290,6 +276,7 @@ class ChangeDetectionStore:
                     'processor',
                     'subtractive_selectors',
                     'tag',
+                    'tags',
                     'text_should_not_be_present',
                     'title',
                     'trigger_text',
@@ -312,25 +299,36 @@ class ChangeDetectionStore:
             flash('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX', 'error')
             return None
 
-        with self.lock:
-            # #Re 569
-            new_watch = Watch.model(datastore_path=self.datastore_path, default={
-                'url': url,
-                'tag': tag,
-                'date_created': int(time.time())
-            })
-
-            new_uuid = new_watch['uuid']
-            logging.debug("Added URL {} - {}".format(url, new_uuid))
-
-            for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
-                if k in apply_extras:
-                    del apply_extras[k]
-
-            new_watch.update(apply_extras)
-            self.__data['watching'][new_uuid] = new_watch
-
-        self.__data['watching'][new_uuid].ensure_data_dir_exists()
+        if tag and type(tag) == str:
+            # Then it's probably a string of the actual tag by name, split and add it
+            for t in tag.split(','):
+                # for each stripped tag, add tag as UUID
+                for a_t in t.split(','):
+                    tag_uuid = self.add_tag(a_t)
+                    apply_extras['tags'].append(tag_uuid)
+
+        # Or if UUIDs given directly
+        if tag_uuids:
+            apply_extras['tags'] = list(set(apply_extras['tags'] + tag_uuids))
+
+        # Make any uuids unique
+        if apply_extras.get('tags'):
+            apply_extras['tags'] = list(set(apply_extras.get('tags')))
+
+        new_watch = Watch.model(datastore_path=self.datastore_path, url=url)
+
+        new_uuid = new_watch.get('uuid')
+
+        logging.debug("Added URL {} - {}".format(url, new_uuid))
+
+        for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
+            if k in apply_extras:
+                del apply_extras[k]
+
+        new_watch.update(apply_extras)
+        new_watch.ensure_data_dir_exists()
+        self.__data['watching'][new_uuid] = new_watch
 
         if write_to_disk_now:
             self.sync_to_json()
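To make the add_watch() rework above concrete, a hypothetical call sequence against a ChangeDetectionStore instance; both paths end up as a de-duplicated list of tag UUIDs on the watch:

    # 'datastore' is an already-constructed ChangeDetectionStore (illustrative)
    uuid_a = datastore.add_watch(url='https://example.com/price', tag='shopping, electronics')

    # Or re-use UUIDs that add_tag() returned, e.g. when importing
    shopping_uuid = datastore.add_tag('shopping')
    uuid_b = datastore.add_watch(url='https://example.com/other', tag_uuids=[shopping_uuid])

    print(datastore.data['watching'][uuid_a]['tags'])  # a list of tag UUIDs, not free text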
@@ -510,10 +508,19 @@ class ChangeDetectionStore:
         filepath = os.path.join(self.datastore_path, 'headers.txt')
         return os.path.isfile(filepath)
 
-    def get_all_headers(self):
+    def get_all_base_headers(self):
         from .model.App import parse_headers_from_text_file
-        headers = copy(self.data['settings'].get('headers', {}))
+        headers = {}
+        # Global app settings
+        headers.update(self.data['settings'].get('headers', {}))
+
+        return headers
+
+    def get_all_headers_in_textfile_for_watch(self, uuid):
+        from .model.App import parse_headers_from_text_file
+        headers = {}
+
+        # Global in /datastore/headers.txt
         filepath = os.path.join(self.datastore_path, 'headers.txt')
         try:
             if os.path.isfile(filepath):
@@ -521,8 +528,79 @@ class ChangeDetectionStore:
         except Exception as e:
             print(f"ERROR reading headers.txt at {filepath}", str(e))
 
+        watch = self.data['watching'].get(uuid)
+        if watch:
+
+            # In /datastore/xyz-xyz/headers.txt
+            filepath = os.path.join(watch.watch_data_dir, 'headers.txt')
+            try:
+                if os.path.isfile(filepath):
+                    headers.update(parse_headers_from_text_file(filepath))
+            except Exception as e:
+                print(f"ERROR reading headers.txt at {filepath}", str(e))
+
+            # In /datastore/tag-name.txt
+            tags = self.get_all_tags_for_watch(uuid=uuid)
+            for tag_uuid, tag in tags.items():
+                fname = "headers-"+re.sub(r'[\W_]', '', tag.get('title')).lower().strip() + ".txt"
+                filepath = os.path.join(self.datastore_path, fname)
+                try:
+                    if os.path.isfile(filepath):
+                        headers.update(parse_headers_from_text_file(filepath))
+                except Exception as e:
+                    print(f"ERROR reading headers.txt at {filepath}", str(e))
+
         return headers
 
+    def get_tag_overrides_for_watch(self, uuid, attr):
+        tags = self.get_all_tags_for_watch(uuid=uuid)
+        ret = []
+
+        if tags:
+            for tag_uuid, tag in tags.items():
+                if attr in tag and tag[attr]:
+                    ret=[*ret, *tag[attr]]
+
+        return ret
+
+    def add_tag(self, name):
+        # If name exists, return that
+        n = name.strip().lower()
+        print (f">>> Adding new tag - '{n}'")
+        if not n:
+            return False
+
+        for uuid, tag in self.__data['settings']['application'].get('tags', {}).items():
+            if n == tag.get('title', '').lower().strip():
+                print (f">>> Tag {name} already exists")
+                return uuid
+
+        # Eventually almost everything todo with a watch will apply as a Tag
+        # So we use the same model as a Watch
+        with self.lock:
+            new_tag = Watch.model(datastore_path=self.datastore_path, default={
+                'title': name.strip(),
+                'date_created': int(time.time())
+            })
+
+            new_uuid = new_tag.get('uuid')
+
+            self.__data['settings']['application']['tags'][new_uuid] = new_tag
+
+        return new_uuid
+
+    def get_all_tags_for_watch(self, uuid):
+        """This should be in Watch model but Watch doesn't have access to datastore, not sure how to solve that yet"""
+        watch = self.data['watching'].get(uuid)
+
+        # Should return a dict of full tag info linked by UUID
+        if watch:
+            return dictfilt(self.__data['settings']['application']['tags'], watch.get('tags', []))
+
+        return {}
+
+    def tag_exists_by_name(self, tag_name):
+        return any(v.get('title', '').lower() == tag_name.lower() for k, v in self.__data['settings']['application']['tags'].items())
+
     # Run all updates
     # IMPORTANT - Each update could be run even when they have a new install and the schema is correct
@@ -709,3 +787,16 @@ class ChangeDetectionStore:
             i+=1
         return
 
+    # Create tag objects and their references from existing tag text
+    def update_12(self):
+        i = 0
+        for uuid, watch in self.data['watching'].items():
+            # Split out and convert old tag string
+            tag = watch.get('tag')
+            if tag:
+                tag_uuids = []
+                for t in tag.split(','):
+                    tag_uuids.append(self.add_tag(name=t))
+
+                self.data['watching'][uuid]['tags'] = tag_uuids
+
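The update_12() migration above is a one-way conversion of the legacy comma-separated tag string; a before/after sketch of a single watch record (UUIDs shortened for readability):

    # Before the migration (schema < 12): free-text tag string on the watch
    watch_before = {'url': 'https://example.com', 'tag': 'shopping, electronics'}

    # After update_12(): each name becomes (or reuses) a Tag entry; the watch now carries UUID references
    watch_after = {'url': 'https://example.com',
                   'tag': 'shopping, electronics',   # legacy field left in place
                   'tags': ['a1b2-...', 'c3d4-...']}  # UUIDs returned by add_tag()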
@@ -1,7 +1,6 @@
 {% macro render_field(field) %}
-  <div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
   <div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
+  <div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
   {% if field.errors %}
     <ul class=errors>
       {% for error in field.errors %}
@@ -25,18 +24,6 @@
 </div>
 {% endmacro %}
 
-{% macro render_field(field) %}
-  <div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
-  <div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
-  {% if field.errors %}
-    <ul class=errors>
-      {% for error in field.errors %}
-        <li>{{ error }}</li>
-      {% endfor %}
-    </ul>
-  {% endif %}
-  </div>
-{% endmacro %}
-
 {% macro render_simple_field(field) %}
   <span class="label {% if field.errors %}error{% endif %}">{{ field.label }}</span>
@@ -58,6 +58,9 @@
             {% if current_user.is_authenticated or not has_password %}
             {% if not
             current_diff_url %}
+            <li class="pure-menu-item">
+                <a href="{{ url_for('tags.tags_overview_page')}}" class="pure-menu-link">GROUPS</a>
+            </li>
             <li class="pure-menu-item">
                 <a href="{{ url_for('settings_page')}}" class="pure-menu-link">SETTINGS</a>
             </li>
@@ -86,17 +89,14 @@
             <!-- We use GET here so it offers people a chance to set bookmarks etc -->
             <form name="searchForm" action="" method="GET">
               <input id="search-q" class="" name="q" placeholder="URL or Title {% if active_tag %}in '{{ active_tag }}'{% endif %}" required="" type="text" value="">
-              <input name="tag" type="hidden" value="{% if active_tag %}{{active_tag}}{% endif %}">
+              <input name="tags" type="hidden" value="{% if active_tag %}{{active_tag}}{% endif %}">
               <button class="toggle-button " id="toggle-search" type="button" title="Search, or Use Alt+S Key" >
                 {% include "svgs/search-icon.svg" %}
               </button>
             </form>
           </li>
           <li class="pure-menu-item">
-            {% if dark_mode %}
-              {% set darkClass = 'dark' %}
-            {% endif %}
-            <button class="toggle-button {{darkClass}}" id ="toggle-light-mode" type="button" title="Toggle Light/Dark Mode">
+            <button class="toggle-button" id ="toggle-light-mode" type="button" title="Toggle Light/Dark Mode">
               <span class="visually-hidden">Toggle light/dark mode</span>
               <span class="icon-light">
                 {% include "svgs/light-mode-toggle-icon.svg" %}
@@ -4,18 +4,19 @@
 {% from '_common_fields.jinja' import render_common_settings_form %}
 <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
 <script>
-    const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
-    const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}";
-    const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
-    const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};
-
-    {% if emailprefix %}
-    const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
-    {% endif %}
-
     const browser_steps_config=JSON.parse('{{ browser_steps_config|tojson }}');
     const browser_steps_start_url="{{url_for('browser_steps.browsersteps_start_session', uuid=uuid)}}";
     const browser_steps_sync_url="{{url_for('browser_steps.browsersteps_ui_update', uuid=uuid)}}";
+    {% if emailprefix %}
+    const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
+    {% endif %}
+    const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
+    const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};
+    const recheck_proxy_start_url="{{url_for('check_proxies.start_check', uuid=uuid)}}";
+    const proxy_recheck_status_url="{{url_for('check_proxies.get_recheck_status', uuid=uuid)}}";
+    const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
+    const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}";
 
 </script>
 
@@ -27,6 +28,8 @@
 <script src="{{url_for('static_content', group='js', filename='browser-steps.js')}}" defer></script>
 {% endif %}
 
+<script src="{{url_for('static_content', group='js', filename='recheck-proxy.js')}}" defer></script>
+
 <div class="edit-form monospaced-textarea">
 
   <div class="tabs collapsable">
@@ -75,7 +78,7 @@
                 {{ render_field(form.title, class="m-d") }}
             </div>
             <div class="pure-control-group">
-                {{ render_field(form.tag) }}
+                {{ render_field(form.tags) }}
                 <span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span>
             </div>
             <div class="pure-control-group">
@@ -111,7 +114,8 @@
             </div>
             {% if form.proxy %}
             <div class="pure-control-group inline-radio">
-                {{ render_field(form.proxy, class="fetch-backend-proxy") }}
+                <div>{{ form.proxy.label }} <a href="" id="check-all-proxies" class="pure-button button-secondary button-xsmall" >Check/Scan all</a></div>
+                <div>{{ form.proxy(class="fetch-backend-proxy") }}</div>
                 <span class="pure-form-message-inline">
                     Choose a proxy for this watch
                 </span>
@@ -13,7 +13,7 @@
     <div id="watch-add-wrapper-zone">
         <div>
             {{ render_simple_field(form.url, placeholder="https://...", required=true) }}
-            {{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="watch label / tag") }}
+            {{ render_simple_field(form.tags, value=tags[active_tag].title if active_tag else '', placeholder="watch label / tag") }}
         </div>
         <div>
             {{ render_simple_field(form.watch_submit_button, title="Watch this URL!" ) }}
@@ -30,12 +30,14 @@
 
     <form class="pure-form" action="{{ url_for('form_watch_list_checkbox_operations') }}" method="POST" id="watch-list-form">
         <input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
+        <input type="hidden" id="op_extradata" name="op_extradata" value="" >
         <div id="checkbox-operations">
             <button class="pure-button button-secondary button-xsmall" name="op" value="pause">Pause</button>
             <button class="pure-button button-secondary button-xsmall" name="op" value="unpause">UnPause</button>
             <button class="pure-button button-secondary button-xsmall" name="op" value="mute">Mute</button>
             <button class="pure-button button-secondary button-xsmall" name="op" value="unmute">UnMute</button>
            <button class="pure-button button-secondary button-xsmall" name="op" value="recheck">Recheck</button>
+            <button class="pure-button button-secondary button-xsmall" name="op" value="assign-tag" id="checkbox-assign-tag">Tag</button>
             <button class="pure-button button-secondary button-xsmall" name="op" value="mark-viewed">Mark viewed</button>
             <button class="pure-button button-secondary button-xsmall" name="op" value="notification-default">Use default notification</button>
             <button class="pure-button button-secondary button-xsmall" style="background: #dd4242;" name="op" value="clear-history">Clear/reset history</button>
@@ -47,9 +49,9 @@
     {% if search_q %}<div id="search-result-info">Searching "<strong><i>{{search_q}}</i></strong>"</div>{% endif %}
     <div>
         <a href="{{url_for('index')}}" class="pure-button button-tag {{'active' if not active_tag }}">All</a>
-        {% for tag in tags %}
+        {% for uuid, tag in tags.items() %}
             {% if tag != "" %}
-                <a href="{{url_for('index', tag=tag) }}" class="pure-button button-tag {{'active' if active_tag == tag }}">{{ tag }}</a>
+                <a href="{{url_for('index', tag=uuid) }}" class="pure-button button-tag {{'active' if active_tag == uuid }}">{{ tag.title }}</a>
             {% endif %}
         {% endfor %}
     </div>
@@ -143,9 +145,11 @@
                         </span>
                         {% endif %}
 
-                        {% if not active_tag %}
-                        <span class="watch-tag-list">{{ watch.tag}}</span>
-                        {% endif %}
+                        {% for watch_tag_uuid, watch_tag in datastore.get_all_tags_for_watch(watch['uuid']).items() %}
+                        <span class="watch-tag-list">{{ watch_tag.title }}</span>
+                        {% endfor %}
+
                     </td>
                     <td class="last-checked">{{watch|format_last_checked_time|safe}}</td>
                     <td class="last-changed">{% if watch.history_n >=2 and watch.last_changed >0 %}
@@ -178,7 +182,7 @@
             {% endif %}
             <li>
                 <a href="{{ url_for('form_watch_checknow', tag=active_tag) }}" class="pure-button button-tag ">Recheck
-                all {% if active_tag%}in "{{active_tag}}"{%endif%}</a>
+                all {% if active_tag%} in "{{tags[active_tag].title}}"{%endif%}</a>
             </li>
             <li>
                 <a href="{{ url_for('rss', tag=active_tag , token=app_rss_token)}}"><img alt="RSS Feed" id="feed-icon" src="{{url_for('static_content', group='images', filename='Generic_Feed-icon.svg')}}" height="15"></a>
@@ -28,7 +28,7 @@ def test_preferred_proxy(client, live_server):
             "fetch_backend": "html_requests",
             "headers": "",
             "proxy": "proxy-two",
-            "tag": "",
+            "tags": "",
             "url": url,
         },
         follow_redirects=True
@@ -77,7 +77,7 @@ def test_restock_detection(client, live_server):
 
     client.post(
         url_for("form_quick_watch_add"),
-        data={"url": test_url, "tag": '', 'processor': 'restock_diff'},
+        data={"url": test_url, "tags": '', 'processor': 'restock_diff'},
         follow_redirects=True
     )
 
42
changedetectionio/tests/smtp/smtp-test-server.py
Executable file
42
changedetectionio/tests/smtp/smtp-test-server.py
Executable file
@@ -0,0 +1,42 @@
|
|||||||
|
#!/usr/bin/python3
|
||||||
|
import smtpd
|
||||||
|
import asyncore
|
||||||
|
|
||||||
|
# Accept a SMTP message and offer a way to retrieve the last message via TCP Socket
|
||||||
|
|
||||||
|
last_received_message = b"Nothing"
|
||||||
|
|
||||||
|
|
||||||
|
class CustomSMTPServer(smtpd.SMTPServer):
|
||||||
|
|
||||||
|
def process_message(self, peer, mailfrom, rcpttos, data, **kwargs):
|
||||||
|
global last_received_message
|
||||||
|
last_received_message = data
|
||||||
|
print('Receiving message from:', peer)
|
||||||
|
print('Message addressed from:', mailfrom)
|
||||||
|
print('Message addressed to :', rcpttos)
|
||||||
|
print('Message length :', len(data))
|
||||||
|
print(data.decode('utf8'))
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
# Just print out the last message received on plain TCP socket server
|
||||||
|
class EchoServer(asyncore.dispatcher):
|
||||||
|
|
||||||
|
def __init__(self, host, port):
|
||||||
|
asyncore.dispatcher.__init__(self)
|
||||||
|
self.create_socket()
|
||||||
|
self.set_reuse_addr()
|
||||||
|
self.bind((host, port))
|
||||||
|
self.listen(5)
|
||||||
|
|
||||||
|
def handle_accepted(self, sock, addr):
|
||||||
|
global last_received_message
|
||||||
|
print('Incoming connection from %s' % repr(addr))
|
||||||
|
sock.send(last_received_message)
|
||||||
|
last_received_message = b''
|
||||||
|
|
||||||
|
|
||||||
|
server = CustomSMTPServer(('0.0.0.0', 11025), None) # SMTP mail goes here
|
||||||
|
server2 = EchoServer('0.0.0.0', 11080) # Echo back last message received
|
||||||
|
asyncore.loop()
|
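The test module that follows retrieves whatever this helper captured by connecting to its plain-TCP echo port. A minimal sketch of that round trip, assuming the helper above is already running and reachable (the localhost hostname and the timeout are illustrative, not taken from the repository); note also that smtpd and asyncore were removed from the Python standard library in 3.12, so the helper itself needs an older interpreter:

    import socket

    def fetch_last_captured_mail(host='localhost', port=11080):
        # The EchoServer above replays the most recent SMTP payload it captured, then clears it
        with socket.create_connection((host, port), timeout=5) as s:
            return s.recv(50024).decode()

    # e.g. print(fetch_last_captured_mail())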
changedetectionio/tests/smtp/test_notification_smtp.py (new file, 165 lines)
@@ -0,0 +1,165 @@
+import json
+import os
+import time
+import re
+from flask import url_for
+from changedetectionio.tests.util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup, \
+    wait_for_all_checks, \
+    set_longer_modified_response
+from changedetectionio.tests.util import extract_UUID_from_client
+import logging
+import base64
+
+# NOTE - RELIES ON mailserver as hostname running, see github build recipes
+smtp_test_server = 'mailserver'
+
+from changedetectionio.notification import (
+    default_notification_body,
+    default_notification_format,
+    default_notification_title,
+    valid_notification_formats,
+)
+
+def test_setup(live_server):
+    live_server_setup(live_server)
+
+def get_last_message_from_smtp_server():
+    import socket
+    global smtp_test_server
+    port = 11080  # socket server port number
+
+    client_socket = socket.socket()  # instantiate
+    client_socket.connect((smtp_test_server, port))  # connect to the server
+
+    data = client_socket.recv(50024).decode()  # receive response
+    client_socket.close()  # close the connection
+    return data
+
+
+# Requires running the test SMTP server
+
+def test_check_notification_email_formats_default_HTML(client, live_server):
+    # live_server_setup(live_server)
+    set_original_response()
+
+    global smtp_test_server
+    notification_url = f'mailto://changedetection@{smtp_test_server}:11025/?to=fff@home.com'
+
+    #####################
+    # Set this up for when we remove the notification from the watch, it should fallback with these details
+    res = client.post(
+        url_for("settings_page"),
+        data={"application-notification_urls": notification_url,
+        "application-notification_title": "fallback-title " + default_notification_title,
+        "application-notification_body": "fallback-body<br> " + default_notification_body,
+        "application-notification_format": 'HTML',
+        "requests-time_between_check-minutes": 180,
+        'application-fetch_backend': "html_requests"},
+        follow_redirects=True
+    )
+    assert b"Settings updated." in res.data
+
+    # Add a watch and trigger a HTTP POST
+    test_url = url_for('test_endpoint', _external=True)
+    res = client.post(
+        url_for("form_quick_watch_add"),
+        data={"url": test_url, "tags": 'nice one'},
+        follow_redirects=True
+    )
+
+    assert b"Watch added" in res.data
+
+    wait_for_all_checks(client)
+    set_longer_modified_response()
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
+    wait_for_all_checks(client)
+
+    time.sleep(3)
+
+    msg = get_last_message_from_smtp_server()
+    assert len(msg) >= 1
+
+    # The email should have two bodies, and the text/html part should be <br>
+    assert 'Content-Type: text/plain' in msg
+    assert '(added) So let\'s see what happens.\n' in msg  # The plaintext part with \n
+    assert 'Content-Type: text/html' in msg
+    assert '(added) So let\'s see what happens.<br>' in msg  # the html part
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
+
+def test_check_notification_email_formats_default_Text_override_HTML(client, live_server):
+    # live_server_setup(live_server)
+
+    # HTML problems? see this
+    # https://github.com/caronc/apprise/issues/633
+
+    set_original_response()
+    global smtp_test_server
+    notification_url = f'mailto://changedetection@{smtp_test_server}:11025/?to=fff@home.com'
+
+    #####################
+    # Set this up for when we remove the notification from the watch, it should fallback with these details
+    res = client.post(
+        url_for("settings_page"),
+        data={"application-notification_urls": notification_url,
+        "application-notification_title": "fallback-title " + default_notification_title,
+        "application-notification_body": default_notification_body,
+        "application-notification_format": 'Text',
+        "requests-time_between_check-minutes": 180,
+        'application-fetch_backend': "html_requests"},
+        follow_redirects=True
+    )
+    assert b"Settings updated." in res.data
+
+    # Add a watch and trigger a HTTP POST
+    test_url = url_for('test_endpoint', _external=True)
+    res = client.post(
+        url_for("form_quick_watch_add"),
+        data={"url": test_url, "tags": 'nice one'},
+        follow_redirects=True
+    )
+
+    assert b"Watch added" in res.data
+
+    wait_for_all_checks(client)
+    set_longer_modified_response()
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
+    wait_for_all_checks(client)
+
+    time.sleep(3)
+    msg = get_last_message_from_smtp_server()
+    assert len(msg) >= 1
+    # with open('/tmp/m.txt', 'w') as f:
+    #     f.write(msg)
+
+    # The email should not have two bodies, should be TEXT only
+
+    assert 'Content-Type: text/plain' in msg
+    assert '(added) So let\'s see what happens.\n' in msg  # The plaintext part with \n
+
+    set_original_response()
+    # Now override as HTML format
+    res = client.post(
+        url_for("edit_page", uuid="first"),
+        data={
+        "url": test_url,
+        "notification_format": 'HTML',
+        'fetch_backend': "html_requests"},
+        follow_redirects=True
+    )
+    assert b"Updated watch." in res.data
+    wait_for_all_checks(client)
+
+    time.sleep(3)
+    msg = get_last_message_from_smtp_server()
+    assert len(msg) >= 1
+
+    # The email should have two bodies, and the text/html part should be <br>
+    assert 'Content-Type: text/plain' in msg
+    assert '(removed) So let\'s see what happens.\n' in msg  # The plaintext part with \n
+    assert 'Content-Type: text/html' in msg
+    assert '(removed) So let\'s see what happens.<br>' in msg  # the html part
+
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
@@ -45,6 +45,15 @@ def test_check_access_control(app, client, live_server):
res = client.get(url_for("diff_history_page", uuid="first"))
assert b'Random content' in res.data

+# Check wrong password does not let us in
+res = c.post(
+url_for("login"),
+data={"password": "WRONG PASSWORD"},
+follow_redirects=True
+)
+
+assert b"LOG OUT" not in res.data
+assert b"Incorrect password" in res.data

# Menu should not be available yet
@@ -2,7 +2,7 @@

import time
from flask import url_for
-from .util import live_server_setup
+from .util import live_server_setup, wait_for_all_checks
from changedetectionio import html_tools


@@ -39,7 +39,6 @@ def test_setup(client, live_server):
live_server_setup(live_server)

def test_check_removed_line_contains_trigger(client, live_server):
-sleep_time_for_fetch_thread = 3

# Give the endpoint time to spin up
time.sleep(1)
@@ -54,7 +53,7 @@ def test_check_removed_line_contains_trigger(client, live_server):
assert b"1 Imported" in res.data

# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# Goto the edit page, add our ignore text
# Add our URL to the import page
@@ -67,20 +66,20 @@ def test_check_removed_line_contains_trigger(client, live_server):
follow_redirects=True
)
assert b"Updated watch." in res.data
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
set_original(excluding='Something irrelevant')

# A line thats not the trigger should not trigger anything
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
assert b'1 watches queued for rechecking.' in res.data
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'unviewed' not in res.data

# The trigger line is REMOVED, this should trigger
set_original(excluding='The golden line')
client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'unviewed' in res.data

@@ -89,14 +88,14 @@ def test_check_removed_line_contains_trigger(client, live_server):
client.get(url_for("mark_all_viewed"), follow_redirects=True)
set_original(excluding=None)
client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'unviewed' not in res.data

# Remove it again, and we should get a trigger
set_original(excluding='The golden line')
client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'unviewed' in res.data

@@ -105,8 +104,7 @@ def test_check_removed_line_contains_trigger(client, live_server):


def test_check_add_line_contains_trigger(client, live_server):
+#live_server_setup(live_server)
-sleep_time_for_fetch_thread = 3

# Give the endpoint time to spin up
time.sleep(1)
@@ -136,8 +134,7 @@ def test_check_add_line_contains_trigger(client, live_server):
assert b"1 Imported" in res.data

# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# Goto the edit page, add our ignore text
# Add our URL to the import page
res = client.post(
@@ -150,23 +147,25 @@ def test_check_add_line_contains_trigger(client, live_server):
follow_redirects=True
)
assert b"Updated watch." in res.data
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
set_original(excluding='Something irrelevant')

# A line thats not the trigger should not trigger anything
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
assert b'1 watches queued for rechecking.' in res.data
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'unviewed' not in res.data

# The trigger line is ADDED, this should trigger
set_original(add_line='<p>Oh yes please</p>')
client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'unviewed' in res.data

+# Takes a moment for apprise to fire
+time.sleep(3)
with open("test-datastore/notification.txt", 'r') as f:
response= f.read()
assert '-Oh yes please-' in response
@@ -2,7 +2,7 @@

import time
from flask import url_for
-from .util import live_server_setup, extract_api_key_from_UI
+from .util import live_server_setup, extract_api_key_from_UI, wait_for_all_checks

import json
import uuid
@@ -57,6 +57,7 @@ def test_setup(client, live_server):
live_server_setup(live_server)

def test_api_simple(client, live_server):
+#live_server_setup(live_server)

api_key = extract_api_key_from_UI(client)

@@ -86,7 +87,7 @@ def test_api_simple(client, live_server):
watch_uuid = res.json.get('uuid')
assert res.status_code == 201

-time.sleep(3)
+wait_for_all_checks(client)

# Verify its in the list and that recheck worked
res = client.get(
@@ -107,7 +108,7 @@ def test_api_simple(client, live_server):
)
assert len(res.json) == 0

-time.sleep(2)
+wait_for_all_checks(client)

set_modified_response()
# Trigger recheck of all ?recheck_all=1
@@ -115,7 +116,7 @@ def test_api_simple(client, live_server):
url_for("createwatch", recheck_all='1'),
headers={'x-api-key': api_key},
)
-time.sleep(3)
+wait_for_all_checks(client)

# Did the recheck fire?
res = client.get(
@@ -266,7 +267,7 @@ def test_api_watch_PUT_update(client, live_server):

#live_server_setup(live_server)
api_key = extract_api_key_from_UI(client)
-time.sleep(1)
# Create a watch
set_original_response()
test_url = url_for('test_endpoint', _external=True,
@@ -282,7 +283,6 @@ def test_api_watch_PUT_update(client, live_server):

assert res.status_code == 201

-time.sleep(1)

# Get a listing, it will be the first one
res = client.get(
@@ -297,6 +297,8 @@ def test_api_watch_PUT_update(client, live_server):
url_for("edit_page", uuid=watch_uuid),
)
assert b"cookie: yum" in res.data, "'cookie: yum' found in 'headers' section"
+assert b"One" in res.data, "Tag 'One' was found"
+assert b"Two" in res.data, "Tag 'Two' was found"

# HTTP PUT ( UPDATE an existing watch )
res = client.put(
@@ -319,7 +321,8 @@ def test_api_watch_PUT_update(client, live_server):
)
assert b"new title" in res.data, "new title found in edit page"
assert b"552" in res.data, "552 minutes found in edit page"
-assert b"One, Two" in res.data, "Tag 'One, Two' was found"
+assert b"One" in res.data, "Tag 'One' was found"
+assert b"Two" in res.data, "Tag 'Two' was found"
assert b"cookie: all eaten" in res.data, "'cookie: all eaten' found in 'headers' section"

######################################################
@@ -24,7 +24,7 @@ def test_basic_auth(client, live_server):
# Check form validation
res = client.post(
url_for("edit_page", uuid="first"),
-data={"include_filters": "", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -2,7 +2,7 @@

import time
from flask import url_for
-from . util import live_server_setup
+from .util import live_server_setup, wait_for_all_checks
from changedetectionio import html_tools

def set_original_ignore_response():
@@ -61,7 +61,7 @@ def set_modified_response_minus_block_text():


def test_check_block_changedetection_text_NOT_present(client, live_server):
-sleep_time_for_fetch_thread = 3
live_server_setup(live_server)
# Use a mix of case in ZzZ to prove it works case-insensitive.
ignore_text = "out of stoCk\r\nfoobar"
@@ -81,7 +81,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
assert b"1 Imported" in res.data

# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# Goto the edit page, add our ignore text
# Add our URL to the import page
@@ -96,7 +96,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
assert b"Updated watch." in res.data

# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
# Check it saved
res = client.get(
url_for("edit_page", uuid="first"),
@@ -107,7 +107,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
client.get(url_for("form_watch_checknow"), follow_redirects=True)

# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# It should report nothing found (no new 'unviewed' class)
res = client.get(url_for("index"))
@@ -120,7 +120,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# It should report nothing found (no new 'unviewed' class)
res = client.get(url_for("index"))
@@ -131,7 +131,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
# Now we set a change where the text is gone, it should now trigger
set_modified_response_minus_block_text()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'unviewed' in res.data
@@ -96,7 +96,7 @@ def test_check_markup_include_filters_restriction(client, live_server):
# Add our URL to the import page
res = client.post(
url_for("edit_page", uuid="first"),
-data={"include_filters": include_filters, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
@@ -157,7 +157,7 @@ def test_check_multiple_filters(client, live_server):
url_for("edit_page", uuid="first"),
data={"include_filters": include_filters,
"url": test_url,
-"tag": "",
+"tags": "",
"headers": "",
'fetch_backend': "html_requests"},
follow_redirects=True
@@ -129,7 +129,7 @@ def test_element_removal_full(client, live_server):
data={
"subtractive_selectors": subtractive_selectors_data,
"url": test_url,
-"tag": "",
+"tags": "",
"headers": "",
"fetch_backend": "html_requests",
},
@@ -91,7 +91,7 @@ def test_check_filter_multiline(client, live_server):
data={"include_filters": '',
'extract_text': '/something.+?6 billion.+?lines/si',
"url": test_url,
-"tag": "",
+"tags": "",
"headers": "",
'fetch_backend': "html_requests"
},
@@ -146,7 +146,7 @@ def test_check_filter_and_regex_extract(client, live_server):
data={"include_filters": include_filters,
'extract_text': '\d+ online\r\n\d+ guests\r\n/somecase insensitive \d+/i\r\n/somecase insensitive (345\d)/i',
"url": test_url,
-"tag": "",
+"tags": "",
"headers": "",
'fetch_backend': "html_requests"
},
@@ -56,7 +56,7 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("form_quick_watch_add"),
-data={"url": test_url, "tag": 'cinema'},
+data={"url": test_url, "tags": 'cinema'},
follow_redirects=True
)
assert b"Watch added" in res.data
@@ -89,7 +89,7 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se

notification_form_data.update({
"url": test_url,
-"tag": "my tag",
+"tags": "my tag",
"title": "my title",
"headers": "",
"include_filters": '.ticket-available',
@@ -1,7 +1,7 @@
import os
import time
from flask import url_for
-from .util import set_original_response, live_server_setup, extract_UUID_from_client
+from .util import set_original_response, live_server_setup, extract_UUID_from_client, wait_for_all_checks
from changedetectionio.model import App


@@ -37,14 +37,14 @@ def run_filter_test(client, content_filter):
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("form_quick_watch_add"),
-data={"url": test_url, "tag": ''},
+data={"url": test_url, "tags": ''},
follow_redirects=True
)

assert b"Watch added" in res.data

# Give the thread time to pick up the first version
-time.sleep(3)
+wait_for_all_checks(client)

# Goto the edit page, add our ignore text
# Add our URL to the import page
@@ -71,8 +71,8 @@ def run_filter_test(client, content_filter):

notification_form_data.update({
"url": test_url,
-"tag": "my tag",
-"title": "my title",
+"tags": "my tag",
+"title": "my title 123",
"headers": "",
"filter_failure_notification_send": 'y',
"include_filters": content_filter,
@@ -85,43 +85,55 @@ def run_filter_test(client, content_filter):
)

assert b"Updated watch." in res.data
-time.sleep(3)
+wait_for_all_checks(client)

# Now the notification should not exist, because we didnt reach the threshold
assert not os.path.isfile("test-datastore/notification.txt")

-for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT):
+# -2 because we would have checked twice above (on adding and on edit)
+for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT-2):
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(3)
+wait_for_all_checks(client)
+assert not os.path.isfile("test-datastore/notification.txt"), f"test-datastore/notification.txt should not exist - Attempt {i}"

# We should see something in the frontend
assert b'Warning, no filters were found' in res.data

+# One more check should trigger it (see -2 above)
+client.get(url_for("form_watch_checknow"), follow_redirects=True)
+wait_for_all_checks(client)
+client.get(url_for("form_watch_checknow"), follow_redirects=True)
+wait_for_all_checks(client)
# Now it should exist and contain our "filter not found" alert
assert os.path.isfile("test-datastore/notification.txt")
-notification = False
with open("test-datastore/notification.txt", 'r') as f:
notification = f.read()

assert 'CSS/xPath filter was not present in the page' in notification
assert content_filter.replace('"', '\\"') in notification

-# Remove it and prove that it doesnt trigger when not expected
+# Remove it and prove that it doesn't trigger when not expected
+# It should register a change, but no 'filter not found'
os.unlink("test-datastore/notification.txt")
set_response_with_filter()

+# Try several times, it should NOT have 'filter not found'
for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT):
client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(3)
+wait_for_all_checks(client)

# It should have sent a notification, but..
assert os.path.isfile("test-datastore/notification.txt")
-# but it should not contain the info about the failed filter
+# but it should not contain the info about a failed filter (because there was none in this case)
with open("test-datastore/notification.txt", 'r') as f:
notification = f.read()
assert not 'CSS/xPath filter was not present in the page' in notification

-# Re #1247 - All tokens got replaced
+# Re #1247 - All tokens got replaced correctly in the notification
+res = client.get(url_for("index"))
uuid = extract_UUID_from_client(client)
+# UUID is correct, but notification contains tag uuid as UUIID wtf
assert uuid in notification

# cleanup for the next
@@ -137,7 +149,7 @@ def test_setup(live_server):

def test_check_include_filters_failure_notification(client, live_server):
set_original_response()
-time.sleep(1)
+wait_for_all_checks(client)
run_filter_test(client, '#nope-doesnt-exist')

def test_check_xpath_filter_failure_notification(client, live_server):
changedetectionio/tests/test_group.py (new file, 322 lines)
@@ -0,0 +1,322 @@
+#!/usr/bin/python3
+
+import time
+from flask import url_for
+from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, get_UUID_for_tag_name, extract_UUID_from_client
+import os
+
+
+def test_setup(client, live_server):
+    live_server_setup(live_server)
+
+def set_original_response():
+    test_return_data = """<html>
+    <body>
+    Some initial text<br>
+    <p id="only-this">Should be only this</p>
+    <br>
+    <p id="not-this">And never this</p>
+    </body>
+    </html>
+    """
+
+    with open("test-datastore/endpoint-content.txt", "w") as f:
+        f.write(test_return_data)
+    return None
+
+def set_modified_response():
+    test_return_data = """<html>
+    <body>
+    Some initial text<br>
+    <p id="only-this">Should be REALLY only this</p>
+    <br>
+    <p id="not-this">And never this</p>
+    </body>
+    </html>
+    """
+
+    with open("test-datastore/endpoint-content.txt", "w") as f:
+        f.write(test_return_data)
+    return None
+
+def test_setup_group_tag(client, live_server):
+    #live_server_setup(live_server)
+    set_original_response()
+
+    # Add a tag with some config, import a tag and it should roughly work
+    res = client.post(
+        url_for("tags.form_tag_add"),
+        data={"name": "test-tag"},
+        follow_redirects=True
+    )
+    assert b"Tag added" in res.data
+    assert b"test-tag" in res.data
+
+    res = client.post(
+        url_for("tags.form_tag_edit_submit", uuid="first"),
+        data={"name": "test-tag",
+        "include_filters": '#only-this',
+        "subtractive_selectors": '#not-this'},
+        follow_redirects=True
+    )
+    assert b"Updated" in res.data
+    tag_uuid = get_UUID_for_tag_name(client, name="test-tag")
+    res = client.get(
+        url_for("tags.form_tag_edit", uuid="first")
+    )
+    assert b"#only-this" in res.data
+    assert b"#not-this" in res.data
+
+    # Tag should be setup and ready, now add a watch
+
+    test_url = url_for('test_endpoint', _external=True)
+    res = client.post(
+        url_for("import_page"),
+        data={"urls": test_url + "?first-imported=1 test-tag, extra-import-tag"},
+        follow_redirects=True
+    )
+    assert b"1 Imported" in res.data
+
+    res = client.get(url_for("index"))
+    assert b'import-tag' in res.data
+    assert b'extra-import-tag' in res.data
+
+    res = client.get(
+        url_for("tags.tags_overview_page"),
+        follow_redirects=True
+    )
+    assert b'import-tag' in res.data
+    assert b'extra-import-tag' in res.data
+
+    wait_for_all_checks(client)
+
+    res = client.get(url_for("index"))
+    assert b'Warning, no filters were found' not in res.data
+
+    res = client.get(
+        url_for("preview_page", uuid="first"),
+        follow_redirects=True
+    )
+    assert b'Should be only this' in res.data
+    assert b'And never this' not in res.data
+
+
+    # RSS Group tag filter
+    # An extra one that should be excluded
+    res = client.post(
+        url_for("import_page"),
+        data={"urls": test_url + "?should-be-excluded=1 some-tag"},
+        follow_redirects=True
+    )
+    assert b"1 Imported" in res.data
+    wait_for_all_checks(client)
+    set_modified_response()
+    res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
+    wait_for_all_checks(client)
+    rss_token = extract_rss_token_from_UI(client)
+    res = client.get(
+        url_for("rss", token=rss_token, tag="extra-import-tag", _external=True),
+        follow_redirects=True
+    )
+    assert b"should-be-excluded" not in res.data
+    assert res.status_code == 200
+    assert b"first-imported=1" in res.data
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
+def test_tag_import_singular(client, live_server):
+    #live_server_setup(live_server)
+
+    test_url = url_for('test_endpoint', _external=True)
+    res = client.post(
+        url_for("import_page"),
+        data={"urls": test_url + " test-tag, test-tag\r\n"+ test_url + "?x=1 test-tag, test-tag\r\n"},
+        follow_redirects=True
+    )
+    assert b"2 Imported" in res.data
+
+    res = client.get(
+        url_for("tags.tags_overview_page"),
+        follow_redirects=True
+    )
+    # Should be only 1 tag because they both had the same
+    assert res.data.count(b'test-tag') == 1
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
+def test_tag_add_in_ui(client, live_server):
+    #live_server_setup(live_server)
+    #
+    res = client.post(
+        url_for("tags.form_tag_add"),
+        data={"name": "new-test-tag"},
+        follow_redirects=True
+    )
+    assert b"Tag added" in res.data
+    assert b"new-test-tag" in res.data
+
+    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
+    assert b'All tags deleted' in res.data
+
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
+def test_group_tag_notification(client, live_server):
+    #live_server_setup(live_server)
+    set_original_response()
+
+    test_url = url_for('test_endpoint', _external=True)
+    res = client.post(
+        url_for("form_quick_watch_add"),
+        data={"url": test_url, "tags": 'test-tag, other-tag'},
+        follow_redirects=True
+    )
+
+    assert b"Watch added" in res.data
+
+    notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json')
+    notification_form_data = {"notification_urls": notification_url,
+        "notification_title": "New GROUP TAG ChangeDetection.io Notification - {{watch_url}}",
+        "notification_body": "BASE URL: {{base_url}}\n"
+        "Watch URL: {{watch_url}}\n"
+        "Watch UUID: {{watch_uuid}}\n"
+        "Watch title: {{watch_title}}\n"
+        "Watch tag: {{watch_tag}}\n"
+        "Preview: {{preview_url}}\n"
+        "Diff URL: {{diff_url}}\n"
+        "Snapshot: {{current_snapshot}}\n"
+        "Diff: {{diff}}\n"
+        "Diff Added: {{diff_added}}\n"
+        "Diff Removed: {{diff_removed}}\n"
+        "Diff Full: {{diff_full}}\n"
+        ":-)",
+        "notification_screenshot": True,
+        "notification_format": "Text",
+        "title": "test-tag"}
+
+    res = client.post(
+        url_for("tags.form_tag_edit_submit", uuid=get_UUID_for_tag_name(client, name="test-tag")),
+        data=notification_form_data,
+        follow_redirects=True
+    )
+    assert b"Updated" in res.data
+
+    wait_for_all_checks(client)
+
+    set_modified_response()
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
+    time.sleep(3)
+
+    assert os.path.isfile("test-datastore/notification.txt")
+
+    # Verify what was sent as a notification, this file should exist
+    with open("test-datastore/notification.txt", "r") as f:
+        notification_submission = f.read()
+    os.unlink("test-datastore/notification.txt")
+
+    # Did we see the URL that had a change, in the notification?
+    # Diff was correctly executed
+    assert test_url in notification_submission
+    assert ':-)' in notification_submission
+    assert "Diff Full: Some initial text" in notification_submission
+    assert "New GROUP TAG ChangeDetection.io" in notification_submission
+    assert "test-tag" in notification_submission
+    assert "other-tag" in notification_submission
+
+    #@todo Test that multiple notifications fired
+    #@todo Test that each of multiple notifications with different settings
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
+def test_limit_tag_ui(client, live_server):
+    #live_server_setup(live_server)
+
+    test_url = url_for('test_endpoint', _external=True)
+    urls=[]
+
+    for i in range(20):
+        urls.append(test_url+"?x="+str(i)+" test-tag")
+
+    for i in range(20):
+        urls.append(test_url+"?non-grouped="+str(i))
+
+    res = client.post(
+        url_for("import_page"),
+        data={"urls": "\r\n".join(urls)},
+        follow_redirects=True
+    )
+
+    assert b"40 Imported" in res.data
+
+    res = client.get(url_for("index"))
+    assert b'test-tag' in res.data
+
+    # All should be here
+    assert res.data.count(b'processor-text_json_diff') == 40
+
+    tag_uuid = get_UUID_for_tag_name(client, name="test-tag")
+
+    res = client.get(url_for("index", tag=tag_uuid))
+
+    # Just a subset should be here
+    assert b'test-tag' in res.data
+    assert res.data.count(b'processor-text_json_diff') == 20
+    assert b"object at" not in res.data
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
+    assert b'All tags deleted' in res.data
+
+def test_clone_tag_on_import(client, live_server):
+    #live_server_setup(live_server)
+    test_url = url_for('test_endpoint', _external=True)
+    res = client.post(
+        url_for("import_page"),
+        data={"urls": test_url + " test-tag, another-tag\r\n"},
+        follow_redirects=True
+    )
+
+    assert b"1 Imported" in res.data
+
+    res = client.get(url_for("index"))
+    assert b'test-tag' in res.data
+    assert b'another-tag' in res.data
+
+    watch_uuid = extract_UUID_from_client(client)
+    res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True)
+
+    assert b'Cloned' in res.data
+    # 2 times plus the top link to tag
+    assert res.data.count(b'test-tag') == 3
+    assert res.data.count(b'another-tag') == 3
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
+def test_clone_tag_on_quickwatchform_add(client, live_server):
+    #live_server_setup(live_server)
+
+    test_url = url_for('test_endpoint', _external=True)
+
+    res = client.post(
+        url_for("form_quick_watch_add"),
+        data={"url": test_url, "tags": ' test-tag, another-tag '},
+        follow_redirects=True
+    )
+
+    assert b"Watch added" in res.data
+
+    res = client.get(url_for("index"))
+    assert b'test-tag' in res.data
+    assert b'another-tag' in res.data
+
+    watch_uuid = extract_UUID_from_client(client)
+    res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True)
+
+    assert b'Cloned' in res.data
+    # 2 times plus the top link to tag
+    assert res.data.count(b'test-tag') == 3
+    assert res.data.count(b'another-tag') == 3
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
+    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
+    assert b'All tags deleted' in res.data
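A quick way to exercise just this new module locally, assuming the project's test dependencies and conftest fixtures are installed; the flag and invocation are illustrative, not taken from the repository:

    # Hypothetical local run of only the group-tag tests (module path from the diff header above)
    import pytest
    pytest.main(["-x", "changedetectionio/tests/test_group.py"])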
@@ -2,7 +2,7 @@

import time
from flask import url_for
-from . util import live_server_setup
+from .util import live_server_setup, wait_for_all_checks
from changedetectionio import html_tools

def test_setup(live_server):
@@ -84,7 +84,6 @@ def set_modified_ignore_response():


def test_check_ignore_text_functionality(client, live_server):
-sleep_time_for_fetch_thread = 3

# Use a mix of case in ZzZ to prove it works case-insensitive.
ignore_text = "XXXXX\r\nYYYYY\r\nzZzZZ\r\nnew ignore stuff"
@@ -103,7 +102,7 @@ def test_check_ignore_text_functionality(client, live_server):
assert b"1 Imported" in res.data

# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# Goto the edit page, add our ignore text
# Add our URL to the import page
@@ -124,7 +123,7 @@ def test_check_ignore_text_functionality(client, live_server):
client.get(url_for("form_watch_checknow"), follow_redirects=True)

# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# It should report nothing found (no new 'unviewed' class)
res = client.get(url_for("index"))
@@ -137,7 +136,7 @@ def test_check_ignore_text_functionality(client, live_server):
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# It should report nothing found (no new 'unviewed' class)
res = client.get(url_for("index"))
@@ -151,7 +150,7 @@ def test_check_ignore_text_functionality(client, live_server):
# Just to be sure.. set a regular modified change..
set_modified_original_ignore_response()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

res = client.get(url_for("index"))
assert b'unviewed' in res.data
@@ -167,7 +166,6 @@ def test_check_ignore_text_functionality(client, live_server):
assert b'Deleted' in res.data

def test_check_global_ignore_text_functionality(client, live_server):
-sleep_time_for_fetch_thread = 3

# Give the endpoint time to spin up
time.sleep(1)
@@ -198,7 +196,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
assert b"1 Imported" in res.data

# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)


# Goto the edit page of the item, add our ignore text
@@ -220,7 +218,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
client.get(url_for("form_watch_checknow"), follow_redirects=True)

# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# so that we are sure everything is viewed and in a known 'nothing changed' state
res = client.get(url_for("diff_history_page", uuid="first"))
@@ -237,7 +235,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# It should report nothing found (no new 'unviewed' class)
res = client.get(url_for("index"))
@@ -247,7 +245,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
# Just to be sure.. set a regular modified change that will trigger it
set_modified_original_ignore_response()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'unviewed' in res.data
@@ -2,7 +2,7 @@

import time
from flask import url_for
-from . util import live_server_setup
+from .util import live_server_setup, wait_for_all_checks


def test_setup(live_server):
@@ -40,7 +40,7 @@ def set_some_changed_response():


def test_normal_page_check_works_with_ignore_status_code(client, live_server):
-sleep_time_for_fetch_thread = 3

# Give the endpoint time to spin up
time.sleep(1)
@@ -68,15 +68,15 @@ def test_normal_page_check_works_with_ignore_status_code(client, live_server):
)
assert b"1 Imported" in res.data

-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

set_some_changed_response()
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)

# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# It should report nothing found (no new 'unviewed' class)
res = client.get(url_for("index"))
@@ -109,13 +109,13 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server):
# Add our URL to the import page
res = client.post(
url_for("edit_page", uuid="first"),
-data={"ignore_status_codes": "y", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"ignore_status_codes": "y", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data

# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# Make a change
set_some_changed_response()
@@ -123,7 +123,7 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server):
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

# It should have 'unviewed' still
# Because it should be looking at only that 'sametext' id
|
|||||||
@@ -112,6 +112,7 @@ def test_import_distillio(client, live_server):
 # did the tags work?
 res = client.get( url_for("index"))

+# check tags
 assert b"nice stuff" in res.data
 assert b"nerd-news" in res.data

@@ -20,7 +20,7 @@ def test_jinja2_in_url_query(client, live_server):
 "date={% now 'Europe/Berlin', '%Y' %}.{% now 'Europe/Berlin', '%m' %}.{% now 'Europe/Berlin', '%d' %}", )
 res = client.post(
 url_for("form_quick_watch_add"),
-data={"url": full_url, "tag": "test"},
+data={"url": full_url, "tags": "test"},
 follow_redirects=True
 )
 assert b"Watch added" in res.data
@@ -208,7 +208,7 @@ def test_check_json_without_filter(client, live_server):
 )

 # Give the thread time to pick it up
-time.sleep(3)
+wait_for_all_checks(client)

 res = client.get(
 url_for("preview_page", uuid="first"),
@@ -238,7 +238,7 @@ def check_json_filter(json_filter, client, live_server):
 assert b"1 Imported" in res.data

 # Give the thread time to pick it up
-time.sleep(3)
+wait_for_all_checks(client)

 # Goto the edit page, add our ignore text
 # Add our URL to the import page
@@ -246,7 +246,7 @@ def check_json_filter(json_filter, client, live_server):
 url_for("edit_page", uuid="first"),
 data={"include_filters": json_filter,
 "url": test_url,
-"tag": "",
+"tags": "",
 "headers": "",
 "fetch_backend": "html_requests"
 },
@@ -261,14 +261,14 @@ def check_json_filter(json_filter, client, live_server):
 assert bytes(escape(json_filter).encode('utf-8')) in res.data

 # Give the thread time to pick it up
-time.sleep(3)
+wait_for_all_checks(client)
 # Make a change
 set_modified_response()

 # Trigger a check
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
 # Give the thread time to pick it up
-time.sleep(4)
+wait_for_all_checks(client)

 # It should have 'unviewed' still
 res = client.get(url_for("index"))
@@ -306,14 +306,14 @@ def check_json_filter_bool_val(json_filter, client, live_server):
 )
 assert b"1 Imported" in res.data

-time.sleep(3)
+wait_for_all_checks(client)
 # Goto the edit page, add our ignore text
 # Add our URL to the import page
 res = client.post(
 url_for("edit_page", uuid="first"),
 data={"include_filters": json_filter,
 "url": test_url,
-"tag": "",
+"tags": "",
 "headers": "",
 "fetch_backend": "html_requests"
 },
@@ -322,14 +322,14 @@ def check_json_filter_bool_val(json_filter, client, live_server):
 assert b"Updated watch." in res.data

 # Give the thread time to pick it up
-time.sleep(3)
+wait_for_all_checks(client)
 # Make a change
 set_modified_response()

 # Trigger a check
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
 # Give the thread time to pick it up
-time.sleep(3)
+wait_for_all_checks(client)

 res = client.get(url_for("diff_history_page", uuid="first"))
 # But the change should be there, tho its hard to test the change was detected because it will show old and new versions
@@ -366,7 +366,7 @@ def check_json_ext_filter(json_filter, client, live_server):
 assert b"1 Imported" in res.data

 # Give the thread time to pick it up
-time.sleep(3)
+wait_for_all_checks(client)

 # Goto the edit page, add our ignore text
 # Add our URL to the import page
@@ -374,7 +374,7 @@ def check_json_ext_filter(json_filter, client, live_server):
 url_for("edit_page", uuid="first"),
 data={"include_filters": json_filter,
 "url": test_url,
-"tag": "",
+"tags": "",
 "headers": "",
 "fetch_backend": "html_requests"
 },
@@ -389,14 +389,14 @@ def check_json_ext_filter(json_filter, client, live_server):
 assert bytes(escape(json_filter).encode('utf-8')) in res.data

 # Give the thread time to pick it up
-time.sleep(3)
+wait_for_all_checks(client)
 # Make a change
 set_modified_ext_response()

 # Trigger a check
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
 # Give the thread time to pick it up
-time.sleep(4)
+wait_for_all_checks(client)

 # It should have 'unviewed'
 res = client.get(url_for("index"))
@@ -428,14 +428,14 @@ def test_ignore_json_order(client, live_server):
 )
 assert b"1 Imported" in res.data

-time.sleep(2)
+wait_for_all_checks(client)

 with open("test-datastore/endpoint-content.txt", "w") as f:
 f.write('{"world" : 123, "hello": 123}')

 # Trigger a check
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(2)
+wait_for_all_checks(client)

 res = client.get(url_for("index"))
 assert b'unviewed' not in res.data
@@ -446,7 +446,7 @@ def test_ignore_json_order(client, live_server):

 # Trigger a check
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(2)
+wait_for_all_checks(client)

 res = client.get(url_for("index"))
 assert b'unviewed' in res.data
@@ -3,7 +3,8 @@ import os
 import time
 import re
 from flask import url_for
-from . util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup
+from .util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup, wait_for_all_checks, \
+    set_longer_modified_response
 from . util import extract_UUID_from_client
 import logging
 import base64
@@ -21,11 +22,9 @@ def test_setup(live_server):
 # Hard to just add more live server URLs when one test is already running (I think)
 # So we add our test here (was in a different file)
 def test_check_notification(client, live_server):
+#live_server_setup(live_server)
 set_original_response()

-# Give the endpoint time to spin up
-time.sleep(3)

 # Re 360 - new install should have defaults set
 res = client.get(url_for("settings_page"))
 notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json')
@@ -62,13 +61,13 @@ def test_check_notification(client, live_server):
 test_url = url_for('test_endpoint', _external=True)
 res = client.post(
 url_for("form_quick_watch_add"),
-data={"url": test_url, "tag": ''},
+data={"url": test_url, "tags": ''},
 follow_redirects=True
 )
 assert b"Watch added" in res.data

 # Give the thread time to pick up the first version
-time.sleep(3)
+wait_for_all_checks(client)

 # We write the PNG to disk, but a JPEG should appear in the notification
 # Write the last screenshot png
@@ -105,7 +104,7 @@ def test_check_notification(client, live_server):

 notification_form_data.update({
 "url": test_url,
-"tag": "my tag",
+"tags": "my tag, my second tag",
 "title": "my title",
 "headers": "",
 "fetch_backend": "html_requests"})
@@ -128,7 +127,7 @@ def test_check_notification(client, live_server):


 ## Now recheck, and it should have sent the notification
-time.sleep(3)
+wait_for_all_checks(client)
 set_modified_response()

 # Trigger a check
@@ -141,8 +140,7 @@ def test_check_notification(client, live_server):

 # Did we see the URL that had a change, in the notification?
 # Diff was correctly executed
-assert test_url in notification_submission
-assert ':-)' in notification_submission
 assert "Diff Full: Some initial text" in notification_submission
 assert "Diff: (changed) Which is across multiple lines" in notification_submission
 assert "(into) which has this one new line" in notification_submission
@@ -150,12 +148,13 @@ def test_check_notification(client, live_server):
 assert "b'" not in notification_submission
 assert re.search('Watch UUID: [0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}', notification_submission, re.IGNORECASE)
 assert "Watch title: my title" in notification_submission
-assert "Watch tag: my tag" in notification_submission
+assert "Watch tag: my tag, my second tag" in notification_submission
 assert "diff/" in notification_submission
 assert "preview/" in notification_submission
 assert ":-)" in notification_submission
 assert "New ChangeDetection.io Notification - {}".format(test_url) in notification_submission
+assert test_url in notification_submission
+assert ':-)' in notification_submission
 # Check the attachment was added, and that it is a JPEG from the original PNG
 notification_submission_object = json.loads(notification_submission)
 # We keep PNG screenshots for now
@@ -193,11 +192,11 @@ def test_check_notification(client, live_server):

 # Trigger a check
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(1)
+wait_for_all_checks(client)
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(1)
+wait_for_all_checks(client)
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(1)
+wait_for_all_checks(client)
 assert os.path.exists("test-datastore/notification.txt") == False

 res = client.get(url_for("notification_logs"))
@@ -209,7 +208,7 @@ def test_check_notification(client, live_server):
 url_for("edit_page", uuid="first"),
 data={
 "url": test_url,
-"tag": "my tag",
+"tags": "my tag",
 "title": "my title",
 "notification_urls": '',
 "notification_title": '',
@@ -243,7 +242,7 @@ def test_notification_validation(client, live_server):
 test_url = url_for('test_endpoint', _external=True)
 res = client.post(
 url_for("form_quick_watch_add"),
-data={"url": test_url, "tag": 'nice one'},
+data={"url": test_url, "tags": 'nice one'},
 follow_redirects=True
 )

@@ -274,7 +273,7 @@ def test_notification_validation(client, live_server):


 def test_notification_custom_endpoint_and_jinja2(client, live_server):
-time.sleep(1)
+#live_server_setup(live_server)

 # test_endpoint - that sends the contents of a file
 # test_notification_endpoint - that takes a POST and writes it to file (test-datastore/notification.txt)
@@ -285,12 +284,14 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):

 res = client.post(
 url_for("settings_page"),
-data={"application-notification_title": "New ChangeDetection.io Notification - {{ watch_url }}",
-"application-notification_body": '{ "url" : "{{ watch_url }}", "secret": 444 }',
-# https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
-"application-notification_urls": test_notification_url,
+data={
+"application-fetch_backend": "html_requests",
 "application-minutes_between_check": 180,
-"application-fetch_backend": "html_requests"
+"application-notification_body": '{ "url" : "{{ watch_url }}", "secret": 444 }',
+"application-notification_format": default_notification_format,
+"application-notification_urls": test_notification_url,
+# https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
+"application-notification_title": "New ChangeDetection.io Notification - {{ watch_url }}",
 },
 follow_redirects=True
 )
@@ -303,21 +304,20 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
 test_url = url_for('test_endpoint', _external=True)
 res = client.post(
 url_for("form_quick_watch_add"),
-data={"url": test_url, "tag": 'nice one'},
+data={"url": test_url, "tags": 'nice one'},
 follow_redirects=True
 )

 assert b"Watch added" in res.data

-time.sleep(2)
+wait_for_all_checks(client)
 set_modified_response()

 client.get(url_for("form_watch_checknow"), follow_redirects=True)
 time.sleep(2)


 with open("test-datastore/notification.txt", 'r') as f:
-x=f.read()
+x = f.read()
 j = json.loads(x)
 assert j['url'].startswith('http://localhost')
 assert j['secret'] == 444
@@ -328,5 +328,9 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
 notification_url = f.read()
 assert 'xxx=http' in notification_url

-os.unlink("test-datastore/notification-url.txt")
+# Should always be automatically detected as JSON content type even when we set it as 'Text' (default)
+assert os.path.isfile("test-datastore/notification-content-type.txt")
+with open("test-datastore/notification-content-type.txt", 'r') as f:
+assert 'application/json' in f.read()

+os.unlink("test-datastore/notification-url.txt")
@@ -17,7 +17,7 @@ def test_check_notification_error_handling(client, live_server):
 test_url = url_for('test_endpoint', _external=True)
 res = client.post(
 url_for("form_quick_watch_add"),
-data={"url": test_url, "tag": ''},
+data={"url": test_url, "tags": ''},
 follow_redirects=True
 )
 assert b"Watch added" in res.data
@@ -32,7 +32,7 @@ def test_check_notification_error_handling(client, live_server):
 "notification_body": "xxxxx",
 "notification_format": "Text",
 "url": test_url,
-"tag": "",
+"tags": "",
 "title": "",
 "headers": "",
 "time_between_check-minutes": "180",
@@ -25,7 +25,7 @@ def test_headers_in_request(client, live_server):
 )
 assert b"1 Imported" in res.data

-time.sleep(1)
+wait_for_all_checks(client)

 res = client.post(
 url_for("import_page"),
@@ -43,7 +43,7 @@ def test_headers_in_request(client, live_server):
 url_for("edit_page", uuid="first"),
 data={
 "url": test_url,
-"tag": "",
+"tags": "",
 "fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
 "headers": "xxx:ooo\ncool:yeah\r\ncookie:"+cookie_header},
 follow_redirects=True
@@ -95,14 +95,14 @@ def test_body_in_request(client, live_server):
 )
 assert b"1 Imported" in res.data

-time.sleep(3)
+wait_for_all_checks(client)

 # add the first 'version'
 res = client.post(
 url_for("edit_page", uuid="first"),
 data={
 "url": test_url,
-"tag": "",
+"tags": "",
 "method": "POST",
 "fetch_backend": "html_requests",
 "body": "something something"},
@@ -110,7 +110,7 @@ def test_body_in_request(client, live_server):
 )
 assert b"Updated watch." in res.data

-time.sleep(3)
+wait_for_all_checks(client)

 # Now the change which should trigger a change
 body_value = 'Test Body Value'
@@ -118,7 +118,7 @@ def test_body_in_request(client, live_server):
 url_for("edit_page", uuid="first"),
 data={
 "url": test_url,
-"tag": "",
+"tags": "",
 "method": "POST",
 "fetch_backend": "html_requests",
 "body": body_value},
@@ -126,7 +126,7 @@ def test_body_in_request(client, live_server):
 )
 assert b"Updated watch." in res.data

-time.sleep(3)
+wait_for_all_checks(client)

 # The service should echo back the body
 res = client.get(
@@ -163,7 +163,7 @@ def test_body_in_request(client, live_server):
 url_for("edit_page", uuid="first"),
 data={
 "url": test_url,
-"tag": "",
+"tags": "",
 "method": "GET",
 "fetch_backend": "html_requests",
 "body": "invalid"},
@@ -187,7 +187,7 @@ def test_method_in_request(client, live_server):
 )
 assert b"1 Imported" in res.data

-time.sleep(2)
+wait_for_all_checks(client)
 res = client.post(
 url_for("import_page"),
 data={"urls": test_url},
@@ -195,14 +195,14 @@ def test_method_in_request(client, live_server):
 )
 assert b"1 Imported" in res.data

-time.sleep(2)
+wait_for_all_checks(client)

 # Attempt to add a method which is not valid
 res = client.post(
 url_for("edit_page", uuid="first"),
 data={
 "url": test_url,
-"tag": "",
+"tags": "",
 "fetch_backend": "html_requests",
 "method": "invalid"},
 follow_redirects=True
@@ -214,7 +214,7 @@ def test_method_in_request(client, live_server):
 url_for("edit_page", uuid="first"),
 data={
 "url": test_url,
-"tag": "",
+"tags": "",
 "fetch_backend": "html_requests",
 "method": "PATCH"},
 follow_redirects=True
@@ -222,7 +222,7 @@ def test_method_in_request(client, live_server):
 assert b"Updated watch." in res.data

 # Give the thread time to pick up the first version
-time.sleep(2)
+wait_for_all_checks(client)

 # The service should echo back the request verb
 res = client.get(
@@ -233,7 +233,7 @@ def test_method_in_request(client, live_server):
 # The test call service will return the verb as the body
 assert b"PATCH" in res.data

-time.sleep(2)
+wait_for_all_checks(client)

 watches_with_method = 0
 with open('test-datastore/url-watches.json') as f:
@@ -265,7 +265,7 @@ def test_headers_textfile_in_request(client, live_server):
 )
 assert b"1 Imported" in res.data

-time.sleep(1)
+wait_for_all_checks(client)


 # Add some headers to a request
@@ -273,7 +273,7 @@ def test_headers_textfile_in_request(client, live_server):
 url_for("edit_page", uuid="first"),
 data={
 "url": test_url,
-"tag": "testtag",
+"tags": "testtag",
 "fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
 "headers": "xxx:ooo\ncool:yeah\r\n"},
 follow_redirects=True
changedetectionio/tests/test_search.py (new file, 74 lines)
@@ -0,0 +1,74 @@
+from flask import url_for
+from .util import set_original_response, set_modified_response, live_server_setup
+import time
+
+def test_setup(live_server):
+live_server_setup(live_server)
+
+def test_basic_search(client, live_server):
+#live_server_setup(live_server)
+
+urls = ['https://localhost:12300?first-result=1',
+'https://localhost:5000?second-result=1'
+]
+res = client.post(
+url_for("import_page"),
+data={"urls": "\r\n".join(urls)},
+follow_redirects=True
+)
+
+assert b"2 Imported" in res.data
+
+# By URL
+res = client.get(url_for("index") + "?q=first-res")
+assert urls[0].encode('utf-8') in res.data
+assert urls[1].encode('utf-8') not in res.data
+
+# By Title
+
+res = client.post(
+url_for("edit_page", uuid="first"),
+data={"title": "xxx-title", "url": urls[0], "tags": "", "headers": "", 'fetch_backend': "html_requests"},
+follow_redirects=True
+)
+assert b"Updated watch." in res.data
+
+res = client.get(url_for("index") + "?q=xxx-title")
+assert urls[0].encode('utf-8') in res.data
+assert urls[1].encode('utf-8') not in res.data
+
+
+def test_search_in_tag_limit(client, live_server):
+#live_server_setup(live_server)
+
+urls = ['https://localhost:12300?first-result=1 tag-one',
+'https://localhost:5000?second-result=1 tag-two'
+]
+res = client.post(
+url_for("import_page"),
+data={"urls": "\r\n".join(urls)},
+follow_redirects=True
+)
+
+assert b"2 Imported" in res.data
+
+# By URL
+
+res = client.get(url_for("index") + "?q=first-res")
+# Split because of the import tag separation
+assert urls[0].split(' ')[0].encode('utf-8') in res.data, urls[0].encode('utf-8')
+assert urls[1].split(' ')[0].encode('utf-8') not in res.data, urls[0].encode('utf-8')
+
+# By Title
+res = client.post(
+url_for("edit_page", uuid="first"),
+data={"title": "xxx-title", "url": urls[0].split(' ')[0], "tags": urls[0].split(' ')[1], "headers": "",
+'fetch_backend': "html_requests"},
+follow_redirects=True
+)
+assert b"Updated watch." in res.data
+
+res = client.get(url_for("index") + "?q=xxx-title")
+assert urls[0].split(' ')[0].encode('utf-8') in res.data, urls[0].encode('utf-8')
+assert urls[1].split(' ')[0].encode('utf-8') not in res.data, urls[0].encode('utf-8')
+
@@ -18,7 +18,7 @@ def test_bad_access(client, live_server):
 url_for("edit_page", uuid="first"),
 data={
 "url": 'javascript:alert(document.domain)',
-"tag": "",
+"tags": "",
 "method": "GET",
 "fetch_backend": "html_requests",
 "body": ""},
@@ -29,7 +29,7 @@ def test_bad_access(client, live_server):

 res = client.post(
 url_for("form_quick_watch_add"),
-data={"url": ' javascript:alert(123)', "tag": ''},
+data={"url": ' javascript:alert(123)', "tags": ''},
 follow_redirects=True
 )

@@ -37,7 +37,7 @@ def test_bad_access(client, live_server):

 res = client.post(
 url_for("form_quick_watch_add"),
-data={"url": '%20%20%20javascript:alert(123)%20%20', "tag": ''},
+data={"url": '%20%20%20javascript:alert(123)%20%20', "tags": ''},
 follow_redirects=True
 )

@@ -46,7 +46,7 @@ def test_bad_access(client, live_server):

 res = client.post(
 url_for("form_quick_watch_add"),
-data={"url": ' source:javascript:alert(document.domain)', "tag": ''},
+data={"url": ' source:javascript:alert(document.domain)', "tags": ''},
 follow_redirects=True
 )

@@ -56,7 +56,7 @@ def test_bad_access(client, live_server):

 client.post(
 url_for("form_quick_watch_add"),
-data={"url": 'file:///tasty/disk/drive', "tag": ''},
+data={"url": 'file:///tasty/disk/drive', "tags": ''},
 follow_redirects=True
 )
 time.sleep(1)
@@ -29,7 +29,7 @@ def test_share_watch(client, live_server):
 # Add our URL to the import page
 res = client.post(
 url_for("edit_page", uuid="first"),
-data={"include_filters": include_filters, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
 follow_redirects=True
 )
 assert b"Updated watch." in res.data
@@ -3,7 +3,7 @@
 import time
 from flask import url_for
 from urllib.request import urlopen
-from .util import set_original_response, set_modified_response, live_server_setup
+from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks

 sleep_time_for_fetch_thread = 3

@@ -42,7 +42,7 @@ def test_check_basic_change_detection_functionality_source(client, live_server):
 res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
 assert b'1 watches queued for rechecking.' in res.data

-time.sleep(5)
+wait_for_all_checks(client)

 # Now something should be ready, indicated by having a 'unviewed' class
 res = client.get(url_for("index"))
@@ -60,7 +60,7 @@ def test_check_basic_change_detection_functionality_source(client, live_server):
 # `subtractive_selectors` should still work in `source:` type requests
 def test_check_ignore_elements(client, live_server):
 set_original_response()
-time.sleep(2)
+time.sleep(1)
 test_url = 'source:'+url_for('test_endpoint', _external=True)
 # Add our URL to the import page
 res = client.post(
@@ -71,14 +71,14 @@ def test_check_ignore_elements(client, live_server):

 assert b"1 Imported" in res.data

-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

 #####################
 # We want <span> and <p> ONLY, but ignore span with .foobar-detection

 client.post(
 url_for("edit_page", uuid="first"),
-data={"include_filters": 'span,p', "url": test_url, "tag": "", "subtractive_selectors": ".foobar-detection", 'fetch_backend': "html_requests"},
+data={"include_filters": 'span,p', "url": test_url, "tags": "", "subtractive_selectors": ".foobar-detection", 'fetch_backend': "html_requests"},
 follow_redirects=True
 )

@@ -26,7 +26,7 @@ def test_check_watch_field_storage(client, live_server):
 "title" : "My title",
 "ignore_text" : "ignore this",
 "url": test_url,
-"tag": "woohoo",
+"tags": "woohoo",
 "headers": "curl:foo",
 'fetch_backend': "html_requests"
 },
@@ -89,7 +89,7 @@ def test_check_xpath_filter_utf8(client, live_server):
 time.sleep(1)
 res = client.post(
 url_for("edit_page", uuid="first"),
-data={"include_filters": filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
 follow_redirects=True
 )
 assert b"Updated watch." in res.data
@@ -143,7 +143,7 @@ def test_check_xpath_text_function_utf8(client, live_server):
 time.sleep(1)
 res = client.post(
 url_for("edit_page", uuid="first"),
-data={"include_filters": filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
 follow_redirects=True
 )
 assert b"Updated watch." in res.data
@@ -189,7 +189,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server):
 # Add our URL to the import page
 res = client.post(
 url_for("edit_page", uuid="first"),
-data={"include_filters": xpath_filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": xpath_filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
 follow_redirects=True
 )
 assert b"Updated watch." in res.data
@@ -231,7 +231,7 @@ def test_xpath_validation(client, live_server):

 res = client.post(
 url_for("edit_page", uuid="first"),
-data={"include_filters": "/something horrible", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": "/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
 follow_redirects=True
 )
 assert b"is not a valid XPath expression" in res.data
@@ -261,7 +261,7 @@ def test_check_with_prefix_include_filters(client, live_server):

 res = client.post(
 url_for("edit_page", uuid="first"),
-data={"include_filters": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
 follow_redirects=True
 )

@@ -38,7 +38,25 @@ def set_modified_response():
 f.write(test_return_data)

 return None
+def set_longer_modified_response():
+test_return_data = """<html>
+<head><title>modified head title</title></head>
+<body>
+Some initial text<br>
+<p>which has this one new line</p>
+<br>
+So let's see what happens. <br>
+So let's see what happens. <br>
+So let's see what happens. <br>
+So let's see what happens. <br>
+</body>
+</html>
+"""
+
+with open("test-datastore/endpoint-content.txt", "w") as f:
+f.write(test_return_data)
+
+return None
 def set_more_modified_response():
 test_return_data = """<html>
 <head><title>modified head title</title></head>
@@ -70,6 +88,16 @@ def extract_api_key_from_UI(client):
 api_key = m.group(1)
 return api_key.strip()


+# kinda funky, but works for now
+def get_UUID_for_tag_name(client, name):
+app_config = client.application.config.get('DATASTORE').data
+for uuid, tag in app_config['settings']['application'].get('tags', {}).items():
+if name == tag.get('title', '').lower().strip():
+return uuid
+return None
+

 # kinda funky, but works for now
 def extract_rss_token_from_UI(client):
 import re
@@ -177,6 +205,10 @@ def live_server_setup(live_server):
 with open("test-datastore/notification-url.txt", "w") as f:
 f.write(request.url)

+if request.content_type:
+with open("test-datastore/notification-content-type.txt", "w") as f:
+f.write(request.content_type)

 print("\n>> Test notification endpoint was hit.\n", data)
 return "Text was set"

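The get_UUID_for_tag_name() helper added above resolves a tag's UUID by matching its (lower-cased) title in the datastore attached to the Flask test client. A hypothetical call from a test might look like the following; the tag title is illustrative only:

    # Assumed usage sketch - look up the UUID of a tag created earlier in the test
    tag_uuid = get_UUID_for_tag_name(client, name='nice one')
    assert tag_uuid is not None
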
@@ -19,7 +19,7 @@ def test_visual_selector_content_ready(client, live_server):

 res = client.post(
 url_for("form_quick_watch_add"),
-data={"url": test_url, "tag": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
+data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
 follow_redirects=True
 )
 assert b"Watch added in Paused state, saving will unpause" in res.data
@@ -28,7 +28,7 @@ def test_visual_selector_content_ready(client, live_server):
 url_for("edit_page", uuid="first", unpause_on_save=1),
 data={
 "url": test_url,
-"tag": "",
+"tags": "",
 "headers": "",
 'fetch_backend': "html_webdriver",
 'webdriver_js_execute_code': 'document.querySelector("button[name=test-button]").click();'
@@ -26,16 +26,89 @@ class update_worker(threading.Thread):
|
|||||||
self.datastore = datastore
|
self.datastore = datastore
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
def send_content_changed_notification(self, t, watch_uuid):
|
def queue_notification_for_watch(self, n_object, watch):
|
||||||
|
|
||||||
from changedetectionio import diff
|
from changedetectionio import diff
|
||||||
|
|
||||||
|
watch_history = watch.history
|
||||||
|
dates = list(watch_history.keys())
|
||||||
|
# Add text that was triggered
|
||||||
|
snapshot_contents = watch.get_history_snapshot(dates[-1])
|
||||||
|
|
||||||
|
# HTML needs linebreak, but MarkDown and Text can use a linefeed
|
||||||
|
if n_object['notification_format'] == 'HTML':
|
||||||
|
line_feed_sep = "<br>"
|
||||||
|
# Snapshot will be plaintext on the disk, convert to some kind of HTML
|
||||||
|
snapshot_contents = snapshot_contents.replace('\n', line_feed_sep)
|
||||||
|
else:
|
||||||
|
line_feed_sep = "\n"
|
||||||
|
|
||||||
|
trigger_text = watch.get('trigger_text', [])
|
||||||
|
triggered_text = ''
|
||||||
|
|
||||||
|
if len(trigger_text):
|
||||||
|
from . import html_tools
|
||||||
|
triggered_text = html_tools.get_triggered_text(content=snapshot_contents, trigger_text=trigger_text)
|
||||||
|
if triggered_text:
|
||||||
|
triggered_text = line_feed_sep.join(triggered_text)
|
||||||
|
|
||||||
|
|
||||||
|
n_object.update({
|
||||||
|
'current_snapshot': snapshot_contents,
|
||||||
|
'diff': diff.render_diff(watch.get_history_snapshot(dates[-2]), watch.get_history_snapshot(dates[-1]), line_feed_sep=line_feed_sep),
|
||||||
|
'diff_added': diff.render_diff(watch.get_history_snapshot(dates[-2]), watch.get_history_snapshot(dates[-1]), include_removed=False, line_feed_sep=line_feed_sep),
|
||||||
|
'diff_full': diff.render_diff(watch.get_history_snapshot(dates[-2]), watch.get_history_snapshot(dates[-1]), include_equal=True, line_feed_sep=line_feed_sep),
|
||||||
|
'diff_removed': diff.render_diff(watch.get_history_snapshot(dates[-2]), watch.get_history_snapshot(dates[-1]), include_added=False, line_feed_sep=line_feed_sep),
|
||||||
|
'screenshot': watch.get_screenshot() if watch.get('notification_screenshot') else None,
|
||||||
|
'triggered_text': triggered_text,
|
||||||
|
'uuid': watch.get('uuid'),
|
||||||
|
'watch_url': watch.get('url'),
|
||||||
|
})
|
||||||
|
logging.info (">> SENDING NOTIFICATION")
|
||||||
|
self.notification_q.put(n_object)
|
||||||
|
|
||||||
|
# Prefer - Individual watch settings > Tag settings > Global settings (in that order)
|
||||||
|
def _check_cascading_vars(self, var_name, watch):
|
||||||
|
|
||||||
from changedetectionio.notification import (
|
from changedetectionio.notification import (
|
||||||
default_notification_format_for_watch
|
default_notification_format_for_watch,
|
||||||
|
default_notification_body,
|
||||||
|
default_notification_title
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Would be better if this was some kind of Object where Watch can reference the parent datastore etc
|
||||||
|
v = watch.get(var_name)
|
||||||
|
if v and not watch.get('notification_muted'):
|
||||||
|
if var_name == 'notification_format' and v == default_notification_format_for_watch:
|
||||||
|
return self.datastore.data['settings']['application'].get('notification_format')
|
||||||
|
|
||||||
|
return v
|
||||||
|
|
||||||
|
tags = self.datastore.get_all_tags_for_watch(uuid=watch.get('uuid'))
|
||||||
|
if tags:
|
||||||
|
for tag_uuid, tag in tags.items():
|
||||||
|
v = tag.get(var_name)
|
||||||
|
if v and not tag.get('notification_muted'):
|
||||||
|
return v
|
||||||
|
|
||||||
|
if self.datastore.data['settings']['application'].get(var_name):
|
||||||
|
return self.datastore.data['settings']['application'].get(var_name)
|
||||||
|
|
||||||
|
# Otherwise could be defaults
|
||||||
|
if var_name == 'notification_format':
|
||||||
|
return default_notification_format_for_watch
|
||||||
|
if var_name == 'notification_body':
|
||||||
|
return default_notification_body
|
||||||
|
if var_name == 'notification_title':
|
||||||
|
return default_notification_title
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def send_content_changed_notification(self, watch_uuid):
|
||||||
|
|
||||||
n_object = {}
|
n_object = {}
|
||||||
watch = self.datastore.data['watching'].get(watch_uuid, False)
|
watch = self.datastore.data['watching'].get(watch_uuid)
|
||||||
if not watch:
|
if not watch:
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -48,59 +121,27 @@ class update_worker(threading.Thread):
|
|||||||
"History index had 2 or more, but only 1 date loaded, timestamps were not unique? maybe two of the same timestamps got written, needs more delay?"
|
"History index had 2 or more, but only 1 date loaded, timestamps were not unique? maybe two of the same timestamps got written, needs more delay?"
|
||||||
)
|
)
|
||||||
|
|
||||||
-            n_object['notification_urls'] = watch['notification_urls'] if len(watch['notification_urls']) else \
-                self.datastore.data['settings']['application']['notification_urls']
-
-            n_object['notification_title'] = watch['notification_title'] if watch['notification_title'] else \
-                self.datastore.data['settings']['application']['notification_title']
-
-            n_object['notification_body'] = watch['notification_body'] if watch['notification_body'] else \
-                self.datastore.data['settings']['application']['notification_body']
-
-            n_object['notification_format'] = watch['notification_format'] if watch['notification_format'] != default_notification_format_for_watch else \
-                self.datastore.data['settings']['application']['notification_format']
-
-            # Only prepare to notify if the rules above matched
-            if 'notification_urls' in n_object and n_object['notification_urls']:
-                # HTML needs linebreak, but MarkDown and Text can use a linefeed
-                if n_object['notification_format'] == 'HTML':
-                    line_feed_sep = "<br>"
-                else:
-                    line_feed_sep = "\n"
-
-                # Add text that was triggered
-                snapshot_contents = watch.get_history_snapshot(dates[-1])
-                trigger_text = watch.get('trigger_text', [])
-                triggered_text = ''
-
-                if len(trigger_text):
-                    from . import html_tools
-                    triggered_text = html_tools.get_triggered_text(content=snapshot_contents, trigger_text=trigger_text)
-                    if triggered_text:
-                        triggered_text = line_feed_sep.join(triggered_text)
-
-                n_object.update({
-                    'current_snapshot': snapshot_contents,
-                    'diff': diff.render_diff(watch.get_history_snapshot(dates[-2]), watch.get_history_snapshot(dates[-1]), line_feed_sep=line_feed_sep),
-                    'diff_added': diff.render_diff(watch.get_history_snapshot(dates[-2]), watch.get_history_snapshot(dates[-1]), include_removed=False, line_feed_sep=line_feed_sep),
-                    'diff_full': diff.render_diff(watch.get_history_snapshot(dates[-2]), watch.get_history_snapshot(dates[-1]), include_equal=True, line_feed_sep=line_feed_sep),
-                    'diff_removed': diff.render_diff(watch.get_history_snapshot(dates[-2]), watch.get_history_snapshot(dates[-1]), include_added=False, line_feed_sep=line_feed_sep),
-                    'screenshot': watch.get_screenshot() if watch.get('notification_screenshot') else None,
-                    'triggered_text': triggered_text,
-                    'uuid': watch_uuid,
-                    'watch_url': watch['url'],
-                })
-                logging.info (">> SENDING NOTIFICATION")
-                self.notification_q.put(n_object)
-            else:
-                logging.info (">> NO Notification sent, notification_url was empty in both watch and system")
+        # Should be a better parent getter in the model object
+
+        # Prefer - Individual watch settings > Tag settings > Global settings (in that order)
+        n_object['notification_urls'] = self._check_cascading_vars('notification_urls', watch)
+        n_object['notification_title'] = self._check_cascading_vars('notification_title', watch)
+        n_object['notification_body'] = self._check_cascading_vars('notification_body', watch)
+        n_object['notification_format'] = self._check_cascading_vars('notification_format', watch)
+
+        # (Individual watch) Only prepare to notify if the rules above matched
+        queued = False
+        if n_object and n_object.get('notification_urls'):
+            queued = True
+            self.queue_notification_for_watch(n_object, watch)
+
+        return queued
 
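The block of inline `watch[...] if ... else global` fallbacks is replaced by self._check_cascading_vars(), which, per the comment in the diff, resolves each notification setting as individual watch > tag > global settings, and queueing moves into queue_notification_for_watch(). The helper's body is not part of this excerpt; the snippet below is only a rough sketch of such a cascade over plain dicts, with made-up names and data, not the project's actual implementation:

    # Illustrative sketch only -- NOT changedetection.io's _check_cascading_vars().
    # Function name, dict shapes and example data are assumptions for demonstration.

    def check_cascading_var(var_name, watch, tags, global_settings):
        # 1. The individual watch wins if it carries a non-empty value
        value = watch.get(var_name)
        if value:
            return value

        # 2. Otherwise fall back to the first tag attached to the watch that sets it
        for tag_uuid in watch.get('tags', []):
            value = tags.get(tag_uuid, {}).get(var_name)
            if value:
                return value

        # 3. Finally fall back to the application-wide default
        return global_settings.get(var_name)


    # Example usage with hypothetical data
    watch = {'notification_urls': [], 'tags': ['t1']}
    tags = {'t1': {'notification_urls': ['mailto://ops@example.com']}}
    global_settings = {'notification_urls': ['json://example.com/notify']}
    print(check_cascading_var('notification_urls', watch, tags, global_settings))
    # -> ['mailto://ops@example.com']  (the tag-level setting wins over the global one)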
     def send_filter_failure_notification(self, watch_uuid):
 
         threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts')
-        watch = self.datastore.data['watching'].get(watch_uuid, False)
+        watch = self.datastore.data['watching'].get(watch_uuid)
         if not watch:
             return
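Note on the send_filter_failure_notification() tweak: dict.get(key) already returns None for a missing key, and the `if not watch:` guard treats None exactly like the old explicit False default, so dropping `, False` changes nothing behaviourally; it simply reads cleaner.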
@@ -177,7 +218,7 @@ class update_worker(threading.Thread):
                 uuid = queued_item_data.item.get('uuid')
                 self.current_uuid = uuid
 
-                if uuid in list(self.datastore.data['watching'].keys()):
+                if uuid in list(self.datastore.data['watching'].keys()) and self.datastore.data['watching'][uuid].get('url'):
                     changed_detected = False
                     contents = b''
                     process_changedetection_results = True
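With the extra `.get('url')` check, a queued UUID is only processed when its watch still exists and carries a non-empty URL; queued items whose watch has since been deleted or never received a URL are now skipped instead of being fetched.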
@@ -338,6 +379,9 @@ class update_worker(threading.Thread):
                         if not self.datastore.data['watching'][uuid].get('ignore_status_codes'):
                             update_obj['consecutive_filter_failures'] = 0
 
+                        # Everything ran OK, clean off any previous error
+                        update_obj['last_error'] = False
+
                         self.cleanup_error_artifacts(uuid)
 
                     #
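The added lines reset last_error to False whenever a check completes cleanly, alongside zeroing consecutive_filter_failures, so an error recorded on an earlier run does not linger on a watch that has since recovered.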
@@ -360,7 +404,7 @@ class update_worker(threading.Thread):
                         # Notifications should only trigger on the second time (first time, we gather the initial snapshot)
                         if watch.history_n >= 2:
                             if not self.datastore.data['watching'][uuid].get('notification_muted'):
-                                self.send_content_changed_notification(self, watch_uuid=uuid)
+                                self.send_content_changed_notification(watch_uuid=uuid)
 
 
                 except Exception as e:
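The final update_worker.py hunk drops a stray `self` that was being passed to the bound method. Assuming the refactored method now takes only watch_uuid besides self (as the new call site suggests), keeping the extra argument would collide with the keyword argument. A minimal, self-contained illustration of that failure mode (class and argument names are made up):

    # Illustrative only: why passing an extra explicit `self` to a bound method breaks,
    # assuming a signature of (self, watch_uuid).

    class Worker:
        def send_content_changed_notification(self, watch_uuid):
            return f"notify {watch_uuid}"

    w = Worker()
    print(w.send_content_changed_notification(watch_uuid="abc-123"))   # fine

    try:
        # Old style: the instance is passed again, lands in watch_uuid's positional
        # slot, and then collides with the keyword argument of the same name.
        w.send_content_changed_notification(w, watch_uuid="abc-123")
    except TypeError as e:
        print(e)  # "... got multiple values for argument 'watch_uuid'"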
@@ -10,7 +10,8 @@ flask~=2.0
 inscriptis~=2.2
 pytz
 timeago~=1.0
-validators
+validators~=0.21
+
 
 # Set these versions together to avoid a RequestsDependencyWarning
 # >= 2.26 also adds Brotli support if brotli is installed
@@ -32,7 +33,7 @@ dnspython<2.3.0
 # jq not available on Windows so must be installed manually
 
 # Notification library
-apprise~=1.3.0
+apprise~=1.4.5
 
 # apprise mqtt https://github.com/dgtlmoon/changedetection.io/issues/315
 paho-mqtt
@@ -71,3 +72,6 @@ pillow
 # Include pytest, so if theres a support issue we can ask them to run these tests on their setup
 pytest ~=7.2
 pytest-flask ~=1.2
+
+# Pin jsonschema version to prevent build errors on armv6 while rpds-py wheels aren't available (1708)
+jsonschema==4.17.3
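These hunks appear to come from the project's requirements file. `validators~=0.21` and `apprise~=1.4.5` are PEP 440 compatible-release pins, while `jsonschema==4.17.3` is an exact pin that avoids pulling a newer jsonschema needing rpds-py wheels on armv6, per the comment above. A quick check of what those specifiers accept, using the `packaging` library purely for illustration (it is not implied to be a dependency of this project):

    # Illustrative only: what PEP 440 "compatible release" (~=) and exact (==) pins accept.
    from packaging.specifiers import SpecifierSet

    print("0.22"   in SpecifierSet("~=0.21"))    # True  - any 0.x release >= 0.21
    print("1.0"    in SpecifierSet("~=0.21"))    # False - major version bump excluded
    print("1.4.9"  in SpecifierSet("~=1.4.5"))   # True  - patch releases within 1.4
    print("1.5.0"  in SpecifierSet("~=1.4.5"))   # False - minor version bump excluded
    print("4.17.3" in SpecifierSet("==4.17.3"))  # True  - exact pin, nothing else matches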