Compare commits

102 commits (SHA1; the author and date columns of the original table were empty):

764514e5eb, ad3ffb6ccb, e051b29bf2, 126852b778, d115b2c858, 2db04e4211, 946a556fb6, eda23678aa, 273bd45ad7, 3d1e1025d2, 5528b7c4b3, 0dce3f4fec, af4311a68c, 792fedb8bc, 824748df9e, c086ec0d68, 8e207ba438, f0823126c8, 98f56736c1, 872bd2de85, e6de1dd135, 599291645d, 156d403552, 7fe0ef7099, fe70beeaed, abf7ed9085, 19e752e9ba, 684e96f5f1, 8f321139fd, 7fdae82e46, bbc18d8e80, d8ee5472f1, 8fd57280b7, 0285d00f13, f7f98945a2, 5e2049c538, 26931e0167, 5229094e44, 5a306aa78c, c8dcc072c8, 7c97a5a403, 7dd967be8e, 3607d15185, 3382b4cb3f, 5f030d3668, 06975d6d8f, f58e5b7f19, e50eff8e35, 07a853ce59, 80f8d23309, 9f41d15908, 89797dfe02, c905652780, 99246d3e6d, f9f69bf0dd, 68efb25e9b, 70606ab05d, d3c8386874, 47103d7f3d, 03c671bfff, e209d9fba0, 3b43da35ec, a0665e1f18, 9ffe7e0eaf, 3e5671a3a2, cd1aca9ee3, 6a589e14f3, dbb76f3618, 4ae27af511, e1860549dc, 9765d56a23, 349111eb35, 71e50569a0, c372942295, 0aef5483d9, c266c64b94, 32e5498a9d, 0ba7928d58, 1709e8f936, b16d65741c, 1cadcc6d15, b58d521d19, 52225f2ad8, 7220afab0a, 1c0fe4c23e, 4f6b0eb8a5, f707c914b6, 9cb636e638, 1d5fe51157, c0b49d3be9, c4dc85525f, 26159840c8, 522e9786c6, 9ce86a2835, f9f6300a70, 7734b22a19, da421fe110, 3e2b55a46f, 7ace259d70, aa6ad7bf47, 40dd29dbc6, 7debccca73
.github/workflows/containers.yml — 16 changes (vendored)

```diff
@@ -96,8 +96,9 @@ jobs:
           tags: |
             ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:dev,ghcr.io/${{ github.repository }}:dev
           platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
-          cache-from: type=local,src=/tmp/.buildx-cache
-          cache-to: type=local,dest=/tmp/.buildx-cache
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+          # Looks like this was disabled
+          # provenance: false

@@ -116,18 +117,11 @@ jobs:
             ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest
             ghcr.io/dgtlmoon/changedetection.io:latest
           platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
-          cache-from: type=local,src=/tmp/.buildx-cache
-          cache-to: type=local,dest=/tmp/.buildx-cache
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+          # Looks like this was disabled
+          # provenance: false

       - name: Image digest
         run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}

-      - name: Cache Docker layers
-        uses: actions/cache@v3
-        with:
-          path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-${{ github.sha }}
-          restore-keys: |
-            ${{ runner.os }}-buildx-
```
.github/workflows/test-only.yml — 61 changes (vendored)

```diff
@@ -29,8 +29,11 @@ jobs:
          docker network create changedet-network

          # Selenium+browserless
-         docker run --network changedet-network -d --hostname selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome-debug:3.141.59
-         docker run --network changedet-network -d --hostname browserless -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.53-chrome-stable
+         docker run --network changedet-network -d --hostname selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome:4
+         docker run --network changedet-network -d --name browserless --hostname browserless -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.60-chrome-stable
+
+         # For accessing custom browser tests
+         docker run --network changedet-network -d --name browserless-custom-url --hostname browserless-custom-url -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm --shm-size="2g" browserless/chrome:1.60-chrome-stable

      - name: Build changedetection.io container for testing
        run: |

@@ -48,6 +51,7 @@ jobs:
        run: |
          # Unit tests
          docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_notification_diff'
+         docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_watch_model'

          # All tests
          docker run --network changedet-network test-changedetectionio bash -c 'cd changedetectionio && ./run_basic_tests.sh'

@@ -83,18 +87,69 @@ jobs:
        run: |
          cd changedetectionio
          ./run_proxy_tests.sh
          # And again with PLAYWRIGHT_DRIVER_URL=..
          cd ..

+     - name: Test custom browser URL
+       run: |
+         cd changedetectionio
+         ./run_custom_browser_url_tests.sh
+         cd ..

      - name: Test changedetection.io container starts+runs basically without error
        run: |
-         docker run -p 5556:5000 -d test-changedetectionio
+         docker run --name test-changedetectionio -p 5556:5000 -d test-changedetectionio
          sleep 3
          # Should return 0 (no error) when grep finds it
          curl -s http://localhost:5556 |grep -q checkbox-uuid
+
+         # and IPv6
+         curl -s -g -6 "http://[::1]:5556"|grep -q checkbox-uuid
+         docker kill test-changedetectionio

+     - name: Test changedetection.io SIGTERM and SIGINT signal shutdown
+       run: |
+
+         echo SIGINT Shutdown request test
+         docker run --name sig-test -d test-changedetectionio
+         sleep 3
+         echo ">>> Sending SIGINT to sig-test container"
+         docker kill --signal=SIGINT sig-test
+         sleep 3
+         # invert the check (it should be not 0/not running)
+         docker ps
+         # check signal catch(STDOUT) log
+         docker logs sig-test | grep 'Shutdown: Got Signal - SIGINT' || exit 1
+         test -z "`docker ps|grep sig-test`"
+         if [ $? -ne 0 ]
+         then
+           echo "Looks like container was running when it shouldnt be"
+           docker ps
+           exit 1
+         fi
+
+         # @todo - scan the container log to see the right "graceful shutdown" text exists
+         docker rm sig-test
+
+         echo SIGTERM Shutdown request test
+         docker run --name sig-test -d test-changedetectionio
+         sleep 3
+         echo ">>> Sending SIGTERM to sig-test container"
+         docker kill --signal=SIGTERM sig-test
+         sleep 3
+         # invert the check (it should be not 0/not running)
+         docker ps
+         docker logs sig-test | grep 'Shutdown: Got Signal - SIGTERM' || exit 1
+         test -z "`docker ps|grep sig-test`"
+         if [ $? -ne 0 ]
+         then
+           echo "Looks like container was running when it shouldnt be"
+           docker ps
+           exit 1
+         fi
+
+         # @todo - scan the container log to see the right "graceful shutdown" text exists
+         docker rm sig-test

          #export WEBDRIVER_URL=http://localhost:4444/wd/hub
          #pytest tests/fetchers/test_content.py
```
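The SIGINT/SIGTERM job above only greps the container log for a `Shutdown: Got Signal - ...` line and then checks that the container is gone. A minimal hypothetical sketch of the kind of handler that would satisfy it (not the application's actual shutdown code, which also saves the datastore):

```python
# Hypothetical minimal handler: print the log line the workflow greps for,
# then exit cleanly so `docker ps | grep sig-test` comes back empty.
import signal
import sys

def shutdown_handler(signum, _frame):
    # signal.Signals(signum).name is 'SIGINT' or 'SIGTERM'
    print(f'Shutdown: Got Signal - {signal.Signals(signum).name}', flush=True)
    sys.exit(0)

signal.signal(signal.SIGINT, shutdown_handler)
signal.signal(signal.SIGTERM, shutdown_handler)

if __name__ == '__main__':
    signal.pause()  # block until a signal arrives (POSIX only)
```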
Dockerfile — 15 changes

```diff
@@ -1,5 +1,5 @@
 # pip dependencies install stage
-FROM python:3.11-slim-bullseye as builder
+FROM python:3.11-slim-bookworm as builder

 # See `cryptography` pin comment in requirements.txt
 ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1

@@ -25,14 +25,13 @@ RUN pip install --target=/dependencies -r /requirements.txt
 # Playwright is an alternative to Selenium
 # Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing
 # https://github.com/dgtlmoon/changedetection.io/pull/1067 also musl/alpine (not supported)
-RUN pip install --target=/dependencies playwright~=1.27.1 \
+RUN pip install --target=/dependencies playwright~=1.40 \
    || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."

 # Final image stage
-FROM python:3.11-slim-bullseye
+FROM python:3.11-slim-bookworm

 RUN apt-get update && apt-get install -y --no-install-recommends \
-    libssl1.1 \
     libxslt1.1 \
     # For pdftohtml
     poppler-utils \

@@ -54,12 +53,12 @@ ENV PYTHONPATH=/usr/local

 EXPOSE 5000

-# The actual flask app
+# The actual flask app module
 COPY changedetectionio /app/changedetectionio

-# The eventlet server wrapper
+# Starting wrapper
 COPY changedetection.py /app/changedetection.py

 WORKDIR /app
-CMD ["python", "./changedetection.py", "-d", "/datastore"]
+CMD [ "python", "./changedetection.py" , "-d", "/datastore"]
```
```diff
@@ -16,3 +16,4 @@ global-exclude venv

 global-exclude test-datastore
 global-exclude changedetection.io*dist-info
+global-exclude changedetectionio/tests/proxy_socks5/test-datastore
```
README.md — 20 changes

````diff
@@ -11,9 +11,10 @@ _Live your data-life pro-actively._

 [![image](docs/screenshot.png)](https://changedetection.io?src=github)

-[**Don't have time? Let us host it for you! try our $8.99/month subscription - use our proxies and support!**](https://changedetection.io) , _half the price of other website change monitoring services!_
+[**Get started with website page change monitoring straight away. Don't have time? Try our $8.99/month subscription, use our proxies and support!**](https://changedetection.io) , _half the price of other website change monitoring services!_

-- Super fast, no registration needed setup.
+- Chrome browser included.
+- Nothing to install, access via browser login after signup.
 - Get started watching and receiving website change notifications straight away.

@@ -97,7 +98,7 @@ Please :star: star :star: this project and help it grow! https://github.com/dgtl
 With Docker composer, just clone this repository and..

 ```bash
-$ docker-compose up -d
+$ docker compose up -d
 ```

 Docker standalone

@@ -136,10 +137,10 @@ docker rm $(docker ps -a -f name=changedetection.io -q)
 docker run -d --restart always -p "127.0.0.1:5000:5000" -v datastore-volume:/datastore --name changedetection.io dgtlmoon/changedetection.io
 ```

-### docker-compose
+### docker compose

 ```bash
-docker-compose pull && docker-compose up -d
+docker compose pull && docker compose up -d
 ```

 See the wiki for more information https://github.com/dgtlmoon/changedetection.io/wiki

@@ -232,6 +233,13 @@ See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configura

 Raspberry Pi and linux/arm/v6 linux/arm/v7 arm64 devices are supported! See the wiki for [details](https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver)

+## Import support
+
+Easily [import your list of websites to watch for changes in Excel .xslx file format](https://changedetection.io/tutorial/how-import-your-website-change-detection-lists-excel), or paste in lists of website URLs as plaintext.
+
+Excel import is recommended - that way you can better organise tags/groups of websites and other features.
+
 ## API Support

 Supports managing the website watch list [via our API](https://changedetection.io/docs/api_v1/index.html)

@@ -261,3 +269,7 @@ I offer commercial support, this software is depended on by network security, ae
 [license-shield]: https://img.shields.io/github/license/dgtlmoon/changedetection.io.svg?style=for-the-badge
 [release-link]: https://github.com/dgtlmoon/changedetection.io/releases
 [docker-link]: https://hub.docker.com/r/dgtlmoon/changedetection.io
+
+## Third-party licenses
+
+changedetectionio.html_tools.elementpath_tostring: Copyright (c), 2018-2021, SISSA (Scuola Internazionale Superiore di Studi Avanzati), Licensed under [MIT license](https://github.com/sissaschool/elementpath/blob/master/LICENSE)
````
```diff
@@ -1,44 +1,6 @@
 #!/usr/bin/python3

-# Entry-point for running from the CLI when not installed via Pip, Pip will handle the console_scripts entry_points's from setup.py
-# It's recommended to use `pip3 install changedetection.io` and start with `changedetection.py` instead, it will be linkd to your global path.
-# or Docker.
-# Read more https://github.com/dgtlmoon/changedetection.io/wiki
+# Only exists for direct CLI usage

-from changedetectionio import changedetection
-import multiprocessing
-import sys
-import os
-
-def sigchld_handler(_signo, _stack_frame):
-    import sys
-    print('Shutdown: Got SIGCHLD')
-    # https://stackoverflow.com/questions/40453496/python-multiprocessing-capturing-signals-to-restart-child-processes-or-shut-do
-    pid, status = os.waitpid(-1, os.WNOHANG | os.WUNTRACED | os.WCONTINUED)
-
-    print('Sub-process: pid %d status %d' % (pid, status))
-    if status != 0:
-        sys.exit(1)
-
-    raise SystemExit
-
-if __name__ == '__main__':
-
-    #signal.signal(signal.SIGCHLD, sigchld_handler)
-
-    # The only way I could find to get Flask to shutdown, is to wrap it and then rely on the subsystem issuing SIGTERM/SIGKILL
-    parse_process = multiprocessing.Process(target=changedetection.main)
-    parse_process.daemon = True
-    parse_process.start()
-    import time
-
-    try:
-        while True:
-            time.sleep(1)
-            if not parse_process.is_alive():
-                # Process died/crashed for some reason, exit with error set
-                sys.exit(1)
-
-    except KeyboardInterrupt:
-        #parse_process.terminate() not needed, because this process will issue it to the sub-process anyway
-        print ("Exited - CTRL+C")
+import changedetectionio
+changedetectionio.main()
```
```diff
@@ -76,7 +76,7 @@ class Watch(Resource):
         # Properties are not returned as a JSON, so add the required props manually
         watch['history_n'] = watch.history_n
         watch['last_changed'] = watch.last_changed
+        watch['viewed'] = watch.viewed
         return watch

     @auth.check_token

@@ -280,11 +283,14 @@ class CreateWatch(Resource):
             if tag_limit and not any(v.get('title').lower() == tag_limit for k, v in tags.items()):
                 continue

-            list[uuid] = {'url': watch['url'],
-                          'title': watch['title'],
-                          'last_checked': watch['last_checked'],
-                          'last_changed': watch.last_changed,
-                          'last_error': watch['last_error']}
+            list[uuid] = {
+                'last_changed': watch.last_changed,
+                'last_checked': watch['last_checked'],
+                'last_error': watch['last_error'],
+                'title': watch['title'],
+                'url': watch['url'],
+                'viewed': watch.viewed
+            }

         if request.args.get('recheck_all'):
             for uuid in self.datastore.data['watching'].keys():

@@ -293,6 +296,61 @@ class CreateWatch(Resource):

         return list, 200

+class Import(Resource):
+    def __init__(self, **kwargs):
+        # datastore is a black box dependency
+        self.datastore = kwargs['datastore']
+
+    @auth.check_token
+    def post(self):
+        """
+        @api {post} /api/v1/import Import a list of watched URLs
+        @apiDescription Accepts a line-feed separated list of URLs to import, additionally with ?tag_uuids=(tag id), ?tag=(name), ?proxy={key}, ?dedupe=true (default true) one URL per line.
+        @apiExample {curl} Example usage:
+            curl http://localhost:5000/api/v1/import --data-binary @list-of-sites.txt -H"x-api-key:8a111a21bc2f8f1dd9b9353bbd46049a"
+        @apiName Import
+        @apiGroup Watch
+        @apiSuccess (200) {List} OK List of watch UUIDs added
+        @apiSuccess (500) {String} ERR Some other error
+        """
+
+        extras = {}
+
+        if request.args.get('proxy'):
+            plist = self.datastore.proxy_list
+            if not request.args.get('proxy') in plist:
+                return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400
+            else:
+                extras['proxy'] = request.args.get('proxy')
+
+        dedupe = strtobool(request.args.get('dedupe', 'true'))
+
+        tags = request.args.get('tag')
+        tag_uuids = request.args.get('tag_uuids')
+
+        if tag_uuids:
+            tag_uuids = tag_uuids.split(',')
+
+        urls = request.get_data().decode('utf8').splitlines()
+        added = []
+        allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
+        for url in urls:
+            url = url.strip()
+            if not len(url):
+                continue
+
+            # If hosts that only contain alphanumerics are allowed ("localhost" for example)
+            if not validators.url(url, simple_host=allow_simplehost):
+                return f"Invalid or unsupported URL - {url}", 400
+
+            if dedupe and self.datastore.url_exists(url):
+                continue
+
+            new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags, tag_uuids=tag_uuids)
+            added.append(new_uuid)
+
+        return added
+
 class SystemInfo(Resource):
     def __init__(self, **kwargs):
         # datastore is a black box dependency
```
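The apidoc block above shows a curl invocation; the same call from Python, as a hedged sketch (the host, API key value and tag name are placeholders, and the query parameters are exactly the ones documented in the hunk):

```python
import requests

# line-feed separated list, one URL per line
urls = "https://example.com\nhttps://example.org\n"

resp = requests.post(
    "http://localhost:5000/api/v1/import",
    params={"tag": "pricing", "dedupe": "true"},  # ?proxy= and ?tag_uuids= are also accepted
    headers={"x-api-key": "8a111a21bc2f8f1dd9b9353bbd46049a"},
    data=urls.encode("utf8"),
)
resp.raise_for_status()
print(resp.json())  # on success: list of newly added watch UUIDs
```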
```diff
@@ -23,10 +23,11 @@

 from distutils.util import strtobool
 from flask import Blueprint, request, make_response
-import os
+import logging
+import os

 from changedetectionio.store import ChangeDetectionStore
-from changedetectionio import login_optionally_required
+from changedetectionio.flask_app import login_optionally_required

 browsersteps_sessions = {}
 io_interface_context = None

@@ -44,7 +45,7 @@ def construct_blueprint(datastore: ChangeDetectionStore):

     # We keep the playwright session open for many minutes
-    seconds_keepalive = int(os.getenv('BROWSERSTEPS_MINUTES_KEEPALIVE', 10)) * 60
+    keepalive_seconds = int(os.getenv('BROWSERSTEPS_MINUTES_KEEPALIVE', 10)) * 60

     browsersteps_start_session = {'start_time': time.time()}

@@ -56,16 +57,18 @@ def construct_blueprint(datastore: ChangeDetectionStore):
         # Start the Playwright context, which is actually a nodejs sub-process and communicates over STDIN/STDOUT pipes
         io_interface_context = io_interface_context.start()

-    # keep it alive for 10 seconds more than we advertise, sometimes it helps to keep it shutting down cleanly
-    keepalive = "&timeout={}".format(((seconds_keepalive + 3) * 1000))
+    keepalive_ms = ((keepalive_seconds + 3) * 1000)
+    base_url = os.getenv('PLAYWRIGHT_DRIVER_URL', '')
+    a = "?" if not '?' in base_url else '&'
+    base_url += a + f"timeout={keepalive_ms}"
+
     try:
-        browsersteps_start_session['browser'] = io_interface_context.chromium.connect_over_cdp(
-            os.getenv('PLAYWRIGHT_DRIVER_URL', '') + keepalive)
+        browsersteps_start_session['browser'] = io_interface_context.chromium.connect_over_cdp(base_url)
     except Exception as e:
         if 'ECONNREFUSED' in str(e):
             return make_response('Unable to start the Playwright Browser session, is it running?', 401)
         else:
             # Other errors, bad URL syntax, bad reply etc
             return make_response(str(e), 401)

     proxy_id = datastore.get_preferred_proxy_for_watch(uuid=watch_uuid)

@@ -118,12 +121,37 @@ def construct_blueprint(datastore: ChangeDetectionStore):
         print("Starting connection with playwright - done")
         return {'browsersteps_session_id': browsersteps_session_id}

+    @login_optionally_required
+    @browser_steps_blueprint.route("/browsersteps_image", methods=['GET'])
+    def browser_steps_fetch_screenshot_image():
+        from flask import (
+            make_response,
+            request,
+            send_from_directory,
+        )
+        uuid = request.args.get('uuid')
+        step_n = int(request.args.get('step_n'))
+
+        watch = datastore.data['watching'].get(uuid)
+        filename = f"step_before-{step_n}.jpeg" if request.args.get('type', '') == 'before' else f"step_{step_n}.jpeg"
+
+        if step_n and watch and os.path.isfile(os.path.join(watch.watch_data_dir, filename)):
+            response = make_response(send_from_directory(directory=watch.watch_data_dir, path=filename))
+            response.headers['Content-type'] = 'image/jpeg'
+            response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
+            response.headers['Pragma'] = 'no-cache'
+            response.headers['Expires'] = 0
+            return response
+
+        else:
+            return make_response('Unable to fetch image, is the URL correct? does the watch exist? does the step_type-n.jpeg exist?', 401)
+
     # A request for an action was received
     @login_optionally_required
     @browser_steps_blueprint.route("/browsersteps_update", methods=['POST'])
     def browsersteps_ui_update():
         import base64
-        import playwright._impl._api_types
+        import playwright._impl._errors
         global browsersteps_sessions
         from changedetectionio.blueprint.browser_steps import browser_steps
```
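Consolidated, the keepalive change above amounts to the following; a standalone sketch (the environment-variable names and defaults are the ones visible in the hunk):

```python
import os

# Keep the session alive a little longer than advertised, and append the
# browserless `timeout` parameter whether or not the driver URL already
# carries a query string.
keepalive_seconds = int(os.getenv('BROWSERSTEPS_MINUTES_KEEPALIVE', 10)) * 60
keepalive_ms = (keepalive_seconds + 3) * 1000

base_url = os.getenv('PLAYWRIGHT_DRIVER_URL', 'ws://playwright-chrome:3000')
sep = '&' if '?' in base_url else '?'
print(base_url + sep + f"timeout={keepalive_ms}")
# -> ws://playwright-chrome:3000?timeout=603000
```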
```diff
@@ -77,13 +77,13 @@ class steppable_browser_interface():
     def action_goto_url(self, selector=None, value=None):
         # self.page.set_viewport_size({"width": 1280, "height": 5000})
         now = time.time()
-        response = self.page.goto(value, timeout=0, wait_until='commit')
-
-        # Wait_until = commit
-        # - `'commit'` - consider operation to be finished when network response is received and the document started loading.
-        # Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
-        # This seemed to solve nearly all 'TimeoutErrors'
+        response = self.page.goto(value, timeout=0, wait_until='load')
+        # Should be the same as the puppeteer_fetch.js methods, means, load with no timeout set (skip timeout)
+        #and also wait for seconds ?
+        #await page.waitForTimeout(1000);
+        #await page.waitForTimeout(extra_wait_ms);
         print("Time to goto URL ", time.time() - now)
         return response

     def action_click_element_containing_text(self, selector=None, value=''):
         if not len(value.strip()):

@@ -99,7 +99,8 @@ class steppable_browser_interface():
         self.page.fill(selector, value, timeout=10 * 1000)

     def action_execute_js(self, selector, value):
-        self.page.evaluate(value)
+        response = self.page.evaluate(value)
+        return response

     def action_click_element(self, selector, value):
         print("Clicking element")

@@ -109,7 +110,7 @@ class steppable_browser_interface():
         self.page.click(selector=selector, timeout=30 * 1000, delay=randint(200, 500))

     def action_click_element_if_exists(self, selector, value):
-        import playwright._impl._api_types as _api_types
+        import playwright._impl._errors as _api_types
         print("Clicking element if exists")
         if not len(selector.strip()):
             return

@@ -122,6 +123,9 @@ class steppable_browser_interface():
             return

     def action_click_x_y(self, selector, value):
+        if not re.match(r'^\s?\d+\s?,\s?\d+\s?$', value):
+            raise Exception("'Click X,Y' step should be in the format of '100 , 90'")
+
         x, y = value.strip().split(',')
         x = int(float(x.strip()))
         y = int(float(y.strip()))

@@ -138,13 +142,13 @@ class steppable_browser_interface():
     def action_wait_for_text(self, selector, value):
         import json
         v = json.dumps(value)
-        self.page.wait_for_function(f'document.querySelector("body").innerText.includes({v});', timeout=90000)
+        self.page.wait_for_function(f'document.querySelector("body").innerText.includes({v});', timeout=30000)

     def action_wait_for_text_in_element(self, selector, value):
         import json
         s = json.dumps(selector)
         v = json.dumps(value)
-        self.page.wait_for_function(f'document.querySelector({s}).innerText.includes({v});', timeout=90000)
+        self.page.wait_for_function(f'document.querySelector({s}).innerText.includes({v});', timeout=30000)

     # @todo - in the future make some popout interface to capture what needs to be set
     # https://playwright.dev/python/docs/api/class-keyboard
```
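A note on the `json.dumps(value)` pattern used in both `wait_for_text` actions above: it converts the user-supplied text into a correctly quoted and escaped JavaScript string literal before it is embedded in the page-side predicate, so quotes and backslashes in the value cannot break the expression:

```python
import json

value = 'He said "50% off" \\ today'
v = json.dumps(value)  # safe JS string literal, including the quotes
print(f'document.querySelector("body").innerText.includes({v});')
# -> document.querySelector("body").innerText.includes("He said \"50% off\" \\ today");
```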
```diff
@@ -40,8 +40,8 @@ def construct_blueprint(datastore: ChangeDetectionStore):
         contents = ''
         now = time.time()
         try:
-            update_handler = text_json_diff.perform_site_check(datastore=datastore)
-            changed_detected, update_obj, contents = update_handler.run(uuid, preferred_proxy=preferred_proxy, skip_when_checksum_same=False)
+            update_handler = text_json_diff.perform_site_check(datastore=datastore, watch_uuid=uuid)
+            update_handler.call_browser()
         # title, size is len contents not len xfer
         except content_fetcher.Non200ErrorCodeReceived as e:
             if e.status_code == 404:
```
```diff
@@ -1,6 +1,6 @@
 from flask import Blueprint, request, make_response, render_template, flash, url_for, redirect
 from changedetectionio.store import ChangeDetectionStore
-from changedetectionio import login_optionally_required
+from changedetectionio.flask_app import login_optionally_required


 def construct_blueprint(datastore: ChangeDetectionStore):
```
```diff
@@ -3,7 +3,7 @@
 {% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
 {% from '_common_fields.jinja' import render_common_settings_form %}
 <script>
-    const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
+    const notification_base_url="{{url_for('ajax_callback_send_notification_test', watch_uuid=uuid)}}";
 </script>

 <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>

@@ -69,11 +69,12 @@ xpath://body/div/span[contains(@class, 'example-class')]",
         {% endif %}
     </ul>
     </li>
-    <li>XPath - Limit text to this XPath rule, simply start with a forward-slash,
+    <li>XPath - Limit text to this XPath rule, simply start with a forward-slash. To specify XPath to be used explicitly or the XPath rule starts with an XPath function: Prefix with <code>xpath:</code>
     <ul>
-        <li>Example: <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
+        <li>Example: <code>//*[contains(@class, 'sametext')]</code> or <code>xpath:count(//*[contains(@class, 'sametext')])</code>, <a
         href="http://xpather.com/" target="new">test your XPath here</a></li>
         <li>Example: Get all titles from an RSS feed <code>//title/text()</code></li>
+        <li>To use XPath1.0: Prefix with <code>xpath1:</code></li>
     </ul>
     </li>
 </ul>
```
```diff
@@ -1,153 +0,0 @@
-#!/usr/bin/python3
-
-# Launch as a eventlet.wsgi server instance.
-
-from distutils.util import strtobool
-from json.decoder import JSONDecodeError
-
-import eventlet
-import eventlet.wsgi
-import getopt
-import os
-import signal
-import socket
-import sys
-
-from . import store, changedetection_app, content_fetcher
-from . import __version__
-
-# Only global so we can access it in the signal handler
-app = None
-datastore = None
-
-def sigterm_handler(_signo, _stack_frame):
-    global app
-    global datastore
-    # app.config.exit.set()
-    print('Shutdown: Got SIGTERM, DB saved to disk')
-    datastore.sync_to_json()
-    # raise SystemExit
-
-def main():
-    global datastore
-    global app
-
-    datastore_path = None
-    do_cleanup = False
-    host = ''
-    ipv6_enabled = False
-    port = os.environ.get('PORT') or 5000
-    ssl_mode = False
-
-    # On Windows, create and use a default path.
-    if os.name == 'nt':
-        datastore_path = os.path.expandvars(r'%APPDATA%\changedetection.io')
-        os.makedirs(datastore_path, exist_ok=True)
-    else:
-        # Must be absolute so that send_from_directory doesnt try to make it relative to backend/
-        datastore_path = os.path.join(os.getcwd(), "../datastore")
-
-    try:
-        opts, args = getopt.getopt(sys.argv[1:], "6Ccsd:h:p:", "port")
-    except getopt.GetoptError:
-        print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path]')
-        sys.exit(2)
-
-    create_datastore_dir = False
-
-    for opt, arg in opts:
-        if opt == '-s':
-            ssl_mode = True
-
-        if opt == '-h':
-            host = arg
-
-        if opt == '-p':
-            port = int(arg)
-
-        if opt == '-d':
-            datastore_path = arg
-
-        if opt == '-6':
-            print ("Enabling IPv6 listen support")
-            ipv6_enabled = True
-
-        # Cleanup (remove text files that arent in the index)
-        if opt == '-c':
-            do_cleanup = True
-
-        # Create the datadir if it doesnt exist
-        if opt == '-C':
-            create_datastore_dir = True
-
-    # isnt there some @thingy to attach to each route to tell it, that this route needs a datastore
-    app_config = {'datastore_path': datastore_path}
-
-    if not os.path.isdir(app_config['datastore_path']):
-        if create_datastore_dir:
-            os.mkdir(app_config['datastore_path'])
-        else:
-            print(
-                "ERROR: Directory path for the datastore '{}' does not exist, cannot start, please make sure the directory exists or specify a directory with the -d option.\n"
-                "Or use the -C parameter to create the directory.".format(app_config['datastore_path']), file=sys.stderr)
-            sys.exit(2)
-
-    try:
-        datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=__version__)
-    except JSONDecodeError as e:
-        # Dont' start if the JSON DB looks corrupt
-        print ("ERROR: JSON DB or Proxy List JSON at '{}' appears to be corrupt, aborting".format(app_config['datastore_path']))
-        print(str(e))
-        return
-
-    app = changedetection_app(app_config, datastore)
-
-    signal.signal(signal.SIGTERM, sigterm_handler)
-
-    # Go into cleanup mode
-    if do_cleanup:
-        datastore.remove_unused_snapshots()
-
-    app.config['datastore_path'] = datastore_path
-
-
-    @app.context_processor
-    def inject_version():
-        return dict(right_sticky="v{}".format(datastore.data['version_tag']),
-                    new_version_available=app.config['NEW_VERSION_AVAILABLE'],
-                    has_password=datastore.data['settings']['application']['password'] != False
-                    )
-
-    # Monitored websites will not receive a Referer header when a user clicks on an outgoing link.
-    # @Note: Incompatible with password login (and maybe other features) for now, submit a PR!
-    @app.after_request
-    def hide_referrer(response):
-        if strtobool(os.getenv("HIDE_REFERER", 'false')):
-            response.headers["Referrer-Policy"] = "no-referrer"
-
-        return response
-
-    # Proxy sub-directory support
-    # Set environment var USE_X_SETTINGS=1 on this script
-    # And then in your proxy_pass settings
-    #
-    #     proxy_set_header Host "localhost";
-    #     proxy_set_header X-Forwarded-Prefix /app;
-
-    if os.getenv('USE_X_SETTINGS'):
-        print ("USE_X_SETTINGS is ENABLED\n")
-        from werkzeug.middleware.proxy_fix import ProxyFix
-        app.wsgi_app = ProxyFix(app.wsgi_app, x_prefix=1, x_host=1)
-
-    s_type = socket.AF_INET6 if ipv6_enabled else socket.AF_INET
-
-    if ssl_mode:
-        # @todo finalise SSL config, but this should get you in the right direction if you need it.
-        eventlet.wsgi.server(eventlet.wrap_ssl(eventlet.listen((host, port), s_type),
-                                               certfile='cert.pem',
-                                               keyfile='privkey.pem',
-                                               server_side=True), app)
-
-    else:
-        eventlet.wsgi.server(eventlet.listen((host, int(port)), s_type), app)
```
```diff
@@ -1,12 +1,15 @@
-import hashlib
 from abc import abstractmethod
+from distutils.util import strtobool
+from urllib.parse import urlparse
 import chardet
+import hashlib
 import json
 import logging
 import os
 import requests
 import sys
 import time
+import urllib.parse

 visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4, header, footer, section, article, aside, details, main, nav, section, summary'

@@ -40,9 +43,11 @@ class JSActionExceptions(Exception):
         return


-class BrowserStepsStepTimout(Exception):
-    def __init__(self, step_n):
+class BrowserStepsStepException(Exception):
+    def __init__(self, step_n, original_e):
         self.step_n = step_n
+        self.original_e = original_e
+        print(f"Browser Steps exception at step {self.step_n}", str(original_e))
         return


@@ -88,18 +93,20 @@ class ReplyWithContentButNoText(Exception):


 class Fetcher():
+    browser_connection_is_custom = None
+    browser_connection_url = None
     browser_steps = None
     browser_steps_screenshot_path = None
     content = None
     error = None
     fetcher_description = "No description"
     headers = {}
+    instock_data = None
+    instock_data_js = ""
     status_code = None
     webdriver_js_execute_code = None
     xpath_data = None
     xpath_element_js = ""
-    instock_data = None
-    instock_data_js = ""

     # Will be needed in the future by the VisualSelector, always get this where possible.
     screenshot = False

@@ -156,9 +163,19 @@ class Fetcher():
         """
         return {k.lower(): v for k, v in self.headers.items()}

+    def browser_steps_get_valid_steps(self):
+        if self.browser_steps is not None and len(self.browser_steps):
+            valid_steps = filter(
+                lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
+                self.browser_steps)
+
+            return valid_steps
+
+        return None
+
     def iterate_browser_steps(self):
         from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface
-        from playwright._impl._api_types import TimeoutError
+        from playwright._impl._errors import TimeoutError, Error
         from jinja2 import Environment
         jinja2_env = Environment(extensions=['jinja2_time.TimeExtension'])

@@ -167,10 +184,7 @@ class Fetcher():
         if self.browser_steps is not None and len(self.browser_steps):
             interface = steppable_browser_interface()
             interface.page = self.page
-
-            valid_steps = filter(
-                lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
-                self.browser_steps)
+            valid_steps = self.browser_steps_get_valid_steps()

             for step in valid_steps:
                 step_n += 1

@@ -191,10 +205,10 @@ class Fetcher():
                         optional_value=optional_value)
                     self.screenshot_step(step_n)
                     self.save_step_html(step_n)
-                except TimeoutError as e:
+                except (Error, TimeoutError) as e:
                     print(str(e))
                     # Stop processing here
-                    raise BrowserStepsStepTimout(step_n=step_n)
+                    raise BrowserStepsStepException(step_n=step_n, original_e=e)

     # It's always good to reset these
     def delete_browser_steps_screenshots(self):

@@ -241,14 +255,19 @@ class base_html_playwright(Fetcher):

     proxy = None

-    def __init__(self, proxy_override=None):
+    def __init__(self, proxy_override=None, custom_browser_connection_url=None):
         super().__init__()
-        # .strip('"') is going to save someone a lot of time when they accidently wrap the env value

         self.browser_type = os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').strip('"')
-        self.command_executor = os.getenv(
-            "PLAYWRIGHT_DRIVER_URL",
-            'ws://playwright-chrome:3000'
-        ).strip('"')
+
+        if custom_browser_connection_url:
+            self.browser_connection_is_custom = True
+            self.browser_connection_url = custom_browser_connection_url
+        else:
+            # Fallback to fetching from system
+            # .strip('"') is going to save someone a lot of time when they accidently wrap the env value
+            self.browser_connection_url = os.getenv("PLAYWRIGHT_DRIVER_URL", 'ws://playwright-chrome:3000').strip('"')

         # If any proxy settings are enabled, then we should setup the proxy object
         proxy_args = {}

@@ -266,7 +285,6 @@ class base_html_playwright(Fetcher):

         if self.proxy:
             # Playwright needs separate username and password values
-            from urllib.parse import urlparse
             parsed = urlparse(self.proxy.get('server'))
             if parsed.username:
                 self.proxy['username'] = parsed.username

@@ -321,13 +339,11 @@ class base_html_playwright(Fetcher):

         # Append proxy connect string
         if self.proxy:
-            import urllib.parse
             # Remove username/password if it exists in the URL or you will receive "ERR_NO_SUPPORTED_PROXIES" error
             # Actual authentication handled by Puppeteer/node
             o = urlparse(self.proxy.get('server'))
             proxy_url = urllib.parse.quote(o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl())
-            browserless_function_url = f"{browserless_function_url}&--proxy-server={proxy_url}&dumpio=true"
+            browserless_function_url = f"{browserless_function_url}&--proxy-server={proxy_url}"

         try:
             amp = '&' if '?' in browserless_function_url else '?'

@@ -347,7 +363,7 @@ class base_html_playwright(Fetcher):
                     'url': url,
                     'user_agent': {k.lower(): v for k, v in request_headers.items()}.get('user-agent', None),
                     'proxy_username': self.proxy.get('username', '') if self.proxy else False,
-                    'proxy_password': self.proxy.get('password', '') if self.proxy else False,
+                    'proxy_password': self.proxy.get('password', '') if self.proxy and self.proxy.get('username') else False,
                     'no_cache_list': [
                         'twitter',
                         '.pdf'

@@ -411,13 +427,11 @@ class base_html_playwright(Fetcher):
             current_include_filters=None,
             is_binary=False):

-        # For now, USE_EXPERIMENTAL_PUPPETEER_FETCH is not supported by watches with BrowserSteps (for now!)
-        has_browser_steps = self.browser_steps and list(filter(
-            lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
-            self.browser_steps))
-
-        if not has_browser_steps:
-            if os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH'):
+        # For now, USE_EXPERIMENTAL_PUPPETEER_FETCH is not supported by watches with BrowserSteps (for now!)
+        # browser_connection_is_custom doesnt work with puppeteer style fetch (use playwright native too in this case)
+        if not self.browser_connection_is_custom and not self.browser_steps and os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH'):
             if strtobool(os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH')):
                 # Temporary backup solution until we rewrite the playwright code
                 return self.run_fetch_browserless_puppeteer(
                     url,

@@ -430,17 +444,21 @@ class base_html_playwright(Fetcher):
                     is_binary)

         from playwright.sync_api import sync_playwright
-        import playwright._impl._api_types
+        import playwright._impl._errors

         self.delete_browser_steps_screenshots()
+        response = None

         with sync_playwright() as p:
             browser_type = getattr(p, self.browser_type)

             # Seemed to cause a connection Exception even tho I can see it connect
             # self.browser = browser_type.connect(self.command_executor, timeout=timeout*1000)
             # 60,000 connection timeout only
-            browser = browser_type.connect_over_cdp(self.command_executor, timeout=60000)
+            browser = browser_type.connect_over_cdp(self.browser_connection_url, timeout=60000)

             # SOCKS5 with authentication is not supported (yet)
             # https://github.com/microsoft/playwright/issues/10567

             # Set user agent to prevent Cloudflare from blocking the browser
             # Use the default one configured in the App.py model that's passed from fetch_site_status.py

@@ -459,41 +477,27 @@ class base_html_playwright(Fetcher):
             if len(request_headers):
                 context.set_extra_http_headers(request_headers)

-            self.page.set_default_navigation_timeout(90000)
-            self.page.set_default_timeout(90000)
-
             # Listen for all console events and handle errors
             self.page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))

+            # Re-use as much code from browser steps as possible so its the same
+            from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface
+            browsersteps_interface = steppable_browser_interface()
+            browsersteps_interface.page = self.page
+
             # Goto page
             try:
-                # Wait_until = commit
-                # - `'commit'` - consider operation to be finished when network response is received and the document started loading.
-                # Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
-                # This seemed to solve nearly all 'TimeoutErrors'
-                response = self.page.goto(url, wait_until='commit')
-            except playwright._impl._api_types.Error as e:
-                # Retry once - https://github.com/browserless/chrome/issues/2485
-                # Sometimes errors related to invalid cert's and other can be random
-                print("Content Fetcher > retrying request got error - ", str(e))
-                time.sleep(1)
-                response = self.page.goto(url, wait_until='commit')
+                response = browsersteps_interface.action_goto_url(value=url)
+                self.headers = response.all_headers()

             except Exception as e:
                 print("Content Fetcher > Other exception when page.goto", str(e))
                 context.close()
                 browser.close()
                 raise PageUnloadable(url=url, status_code=None, message=str(e))

+            if response is None:
+                context.close()
+                browser.close()
+                print("Content Fetcher > Response object was none")
+                raise EmptyReply(url=url, status_code=None)
+
             # Execute any browser steps
             try:
                 extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
                 self.page.wait_for_timeout(extra_wait * 1000)

                 if self.webdriver_js_execute_code is not None and len(self.webdriver_js_execute_code):
-                    self.page.evaluate(self.webdriver_js_execute_code)
-
-            except playwright._impl._api_types.TimeoutError as e:
+                    browsersteps_interface.action_execute_js(value=self.webdriver_js_execute_code, selector=None)
+            except playwright._impl._errors.TimeoutError as e:
                 context.close()
                 browser.close()
                 # This can be ok, we will try to grab what we could retrieve

@@ -504,28 +508,30 @@ class base_html_playwright(Fetcher):
                 browser.close()
                 raise PageUnloadable(url=url, status_code=None, message=str(e))

-            if response is None:
-                context.close()
-                browser.close()
-                print("Content Fetcher > Response object was none")
-                raise EmptyReply(url=url, status_code=None)
-
-            # Run Browser Steps here
-            self.iterate_browser_steps()
-
             extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
-            time.sleep(extra_wait)
+            self.page.wait_for_timeout(extra_wait * 1000)

-            self.content = self.page.content()
             self.status_code = response.status

             if self.status_code != 200 and not ignore_status_codes:
+                screenshot = self.page.screenshot(type='jpeg', full_page=True,
+                                                  quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)))
+
                 raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, screenshot=screenshot)

             if len(self.page.content().strip()) == 0:
                 context.close()
                 browser.close()
                 print("Content Fetcher > Content was empty")
                 raise EmptyReply(url=url, status_code=response.status)

-            self.status_code = response.status
             self.headers = response.all_headers()
+
+            # Run Browser Steps here
+            if self.browser_steps_get_valid_steps():
+                self.iterate_browser_steps()
+
+                self.page.wait_for_timeout(extra_wait * 1000)

             # So we can find an element on the page where its selector was entered manually (maybe not xPath etc)
             if current_include_filters is not None:

@@ -537,6 +543,7 @@ class base_html_playwright(Fetcher):
                 "async () => {" + self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) + "}")
             self.instock_data = self.page.evaluate("async () => {" + self.instock_data_js + "}")

+            self.content = self.page.content()
             # Bug 3 in Playwright screenshot handling
             # Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it
             # JPEG is better here because the screenshots can be very very large

@@ -551,7 +558,7 @@ class base_html_playwright(Fetcher):
             except Exception as e:
                 context.close()
                 browser.close()
-                raise ScreenshotUnavailable(url=url, status_code=None)
+                raise ScreenshotUnavailable(url=url, status_code=response.status_code)

             context.close()
             browser.close()

@@ -563,8 +570,6 @@ class base_html_webdriver(Fetcher):
     else:
         fetcher_description = "WebDriver Chrome/Javascript"

-    command_executor = ''
-
     # Configs for Proxy setup
     # In the ENV vars, is prefixed with "webdriver_", so it is for example "webdriver_sslProxy"
     selenium_proxy_settings_mappings = ['proxyType', 'ftpProxy', 'httpProxy', 'noProxy',

@@ -572,12 +577,16 @@ class base_html_webdriver(Fetcher):
                                         'socksProxy', 'socksVersion', 'socksUsername', 'socksPassword']
     proxy = None

-    def __init__(self, proxy_override=None):
+    def __init__(self, proxy_override=None, custom_browser_connection_url=None):
         super().__init__()
         from selenium.webdriver.common.proxy import Proxy as SeleniumProxy

         # .strip('"') is going to save someone a lot of time when they accidently wrap the env value
-        self.command_executor = os.getenv("WEBDRIVER_URL", 'http://browser-chrome:4444/wd/hub').strip('"')
+        if not custom_browser_connection_url:
+            self.browser_connection_url = os.getenv("WEBDRIVER_URL", 'http://browser-chrome:4444/wd/hub').strip('"')
+        else:
+            self.browser_connection_is_custom = True
+            self.browser_connection_url = custom_browser_connection_url

         # If any proxy settings are enabled, then we should setup the proxy object
         proxy_args = {}

@@ -610,15 +619,17 @@ class base_html_webdriver(Fetcher):
             is_binary=False):

         from selenium import webdriver
-        from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
+        from selenium.webdriver.chrome.options import Options as ChromeOptions
         from selenium.common.exceptions import WebDriverException
         # request_body, request_method unused for now, until some magic in the future happens.

-        # check env for WEBDRIVER_URL
+        options = ChromeOptions()
+        if self.proxy:
+            options.proxy = self.proxy
+
         self.driver = webdriver.Remote(
-            command_executor=self.command_executor,
-            desired_capabilities=DesiredCapabilities.CHROME,
-            proxy=self.proxy)
+            command_executor=self.browser_connection_url,
+            options=options)

         try:
             self.driver.get(url)

@@ -650,11 +661,11 @@ class base_html_webdriver(Fetcher):
     # Does the connection to the webdriver work? run a test connection.
     def is_ready(self):
         from selenium import webdriver
-        from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
+        from selenium.webdriver.chrome.options import Options as ChromeOptions

         self.driver = webdriver.Remote(
             command_executor=self.command_executor,
-            desired_capabilities=DesiredCapabilities.CHROME)
+            options=ChromeOptions())

         # driver.quit() seems to cause better exceptions
         self.quit()

@@ -672,8 +683,10 @@ class base_html_webdriver(Fetcher):
 class html_requests(Fetcher):
     fetcher_description = "Basic fast Plaintext/HTTP Client"

-    def __init__(self, proxy_override=None):
+    def __init__(self, proxy_override=None, custom_browser_connection_url=None):
         super().__init__()
         self.proxy_override = proxy_override
+        # browser_connection_url is none because its always 'launched locally'

     def run(self,
             url,

@@ -693,6 +706,10 @@ class html_requests(Fetcher):
         proxies = {}

         # Allows override the proxy on a per-request basis
+
+        # https://requests.readthedocs.io/en/latest/user/advanced/#socks
+        # Should also work with `socks5://user:pass@host:port` type syntax.
+
         if self.proxy_override:
             proxies = {'http': self.proxy_override, 'https': self.proxy_override, 'ftp': self.proxy_override}
         else:
```
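The step filter that was factored out into `browser_steps_get_valid_steps()` above keeps only steps whose operation is set and is not a placeholder entry. A standalone sketch of the same rule with made-up step data:

```python
browser_steps = [
    {'operation': 'Goto site'},                        # skipped: implicit first step
    {'operation': 'Choose one'},                       # skipped: unset placeholder
    {'operation': 'Click element', 'selector': '#buy'},
    {'operation': ''},                                 # skipped: empty
]

valid_steps = [
    s for s in browser_steps
    if s['operation'] and s['operation'] not in ('Choose one', 'Goto site')
]
print(valid_steps)  # -> [{'operation': 'Click element', 'selector': '#buy'}]
```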
changedetectionio/flask_app.py — 1730 lines (normal file)
```diff
@@ -15,14 +15,20 @@ from wtforms import (
     validators,
     widgets
 )
+from flask_wtf.file import FileField, FileAllowed
 from wtforms.fields import FieldList

 from wtforms.validators import ValidationError

+from validators.url import url as url_validator
+

 # default
 # each select <option data-enabled="enabled-0-0"
 from changedetectionio.blueprint.browser_steps.browser_steps import browser_step_ui_config

-from changedetectionio import content_fetcher
+from changedetectionio import content_fetcher, html_tools

 from changedetectionio.notification import (
     valid_notification_formats,
 )

@@ -40,7 +46,7 @@
 }

 default_method = 'GET'
+allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))

 class StringListField(StringField):
     widget = widgets.TextArea()

@@ -162,7 +168,9 @@ class ValidateContentFetcherIsReady(object):
     def __call__(self, form, field):
         import urllib3.exceptions
         from changedetectionio import content_fetcher
+        return

         # AttributeError: module 'changedetectionio.content_fetcher' has no attribute 'extra_browser_unlocked<>ASDF213r123r'
         # Better would be a radiohandler that keeps a reference to each class
         if field.data is not None and field.data != 'system':
             klass = getattr(content_fetcher, field.data)

@@ -260,19 +268,23 @@ class validateURL(object):
         self.message = message

     def __call__(self, form, field):
-        import validators
-        # If hosts that only contain alphanumerics are allowed ("localhost" for example)
-        allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
-        try:
-            validators.url(field.data.strip(), simple_host=allow_simplehost)
-        except validators.ValidationFailure:
-            message = field.gettext('\'%s\' is not a valid URL.' % (field.data.strip()))
-            raise ValidationError(message)
+        # This should raise a ValidationError() or not
+        validate_url(field.data)

-        from .model.Watch import is_safe_url
-        if not is_safe_url(field.data):
-            raise ValidationError('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX')
+def validate_url(test_url):
+    # If hosts that only contain alphanumerics are allowed ("localhost" for example)
+    try:
+        url_validator(test_url, simple_host=allow_simplehost)
+    except validators.ValidationError:
+        #@todo check for xss
+        message = f"'{test_url}' is not a valid URL."
+        # This should be wtforms.validators.
+        raise ValidationError(message)
+
+    from .model.Watch import is_safe_url
+    if not is_safe_url(test_url):
+        # This should be wtforms.validators.
+        raise ValidationError('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX or incorrect URL format')
```
```diff
@@ -284,11 +296,10 @@ class ValidateListRegex(object):
     def __call__(self, form, field):

         for line in field.data:
-            if line[0] == '/' and line[-1] == '/':
-                # Because internally we dont wrap in /
-                line = line.strip('/')
+            if re.search(html_tools.PERL_STYLE_REGEX, line, re.IGNORECASE):
                 try:
-                    re.compile(line)
+                    regex = html_tools.perl_style_slash_enclosed_regex_to_options(line)
+                    re.compile(regex)
                 except re.error:
                     message = field.gettext('RegEx \'%s\' is not a valid regular expression.')
                     raise ValidationError(message % (line))

@@ -317,11 +328,30 @@ class ValidateCSSJSONXPATHInput(object):
                 return

             # Does it look like XPath?
-            if line.strip()[0] == '/':
+            if line.strip()[0] == '/' or line.strip().startswith('xpath:'):
                 if not self.allow_xpath:
                     raise ValidationError("XPath not permitted in this field!")
                 from lxml import etree, html
+                import elementpath
+                # xpath 2.0-3.1
+                from elementpath.xpath3 import XPath3Parser
                 tree = html.fromstring("<html></html>")
+                line = line.replace('xpath:', '')
+
+                try:
+                    elementpath.select(tree, line.strip(), parser=XPath3Parser)
+                except elementpath.ElementPathError as e:
+                    message = field.gettext('\'%s\' is not a valid XPath expression. (%s)')
+                    raise ValidationError(message % (line, str(e)))
+                except:
+                    raise ValidationError("A system-error occurred when validating your XPath expression")
+
+            if line.strip().startswith('xpath1:'):
+                if not self.allow_xpath:
+                    raise ValidationError("XPath not permitted in this field!")
+                from lxml import etree, html
+                tree = html.fromstring("<html></html>")
+                line = re.sub(r'^xpath1:', '', line)

                 try:
                     tree.xpath(line.strip())

@@ -398,6 +428,9 @@ class importForm(Form):
     from . import processors
     processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff")
     urls = TextAreaField('URLs')
+    xlsx_file = FileField('Upload .xlsx file', validators=[FileAllowed(['xlsx'], 'Must be .xlsx file!')])
+    file_mapping = SelectField('File mapping', [validators.DataRequired()], choices={('wachete', 'Wachete mapping'), ('custom','Custom mapping')})


 class SingleBrowserStep(Form):

@@ -481,9 +514,15 @@ class SingleExtraProxy(Form):

     # maybe better to set some <script>var..
     proxy_name = StringField('Name', [validators.Optional()], render_kw={"placeholder": "Name"})
-    proxy_url = StringField('Proxy URL', [validators.Optional()], render_kw={"placeholder": "http://user:pass@...:3128", "size":50})
+    proxy_url = StringField('Proxy URL', [validators.Optional()], render_kw={"placeholder": "socks5:// or regular proxy http://user:pass@...:3128", "size":50})
     # @todo do the validation here instead

+class SingleExtraBrowser(Form):
+    browser_name = StringField('Name', [validators.Optional()], render_kw={"placeholder": "Name"})
+    browser_connection_url = StringField('Browser connection URL', [validators.Optional()], render_kw={"placeholder": "wss://brightdata... wss://oxylabs etc", "size":50})
+    # @todo do the validation here instead
+

 # datastore.data['settings']['requests']..
 class globalSettingsRequestForm(Form):
     time_between_check = FormField(TimeBetweenCheckForm)

@@ -492,6 +531,7 @@ class globalSettingsRequestForm(Form):
         render_kw={"style": "width: 5em;"},
         validators=[validators.NumberRange(min=0, message="Should contain zero or more seconds")])
     extra_proxies = FieldList(FormField(SingleExtraProxy), min_entries=5)
+    extra_browsers = FieldList(FormField(SingleExtraBrowser), min_entries=5)

     def validate_extra_proxies(self, extra_validators=None):
         for e in self.data['extra_proxies']:
```
@@ -1,9 +1,12 @@

from bs4 import BeautifulSoup
from inscriptis import get_text
from inscriptis.model.config import ParserConfig
from jsonpath_ng.ext import parse
from typing import List
from inscriptis.css_profiles import CSS_PROFILES, HtmlElement
from inscriptis.html_properties import Display
from inscriptis.model.config import ParserConfig
from xml.sax.saxutils import escape as xml_escape
import json
import re

@@ -66,12 +69,96 @@ def element_removal(selectors: List[str], html_content):
    selector = ",".join(selectors)
    return subtractive_css_selector(selector, html_content)

def elementpath_tostring(obj):
    """
    change elementpath.select results to string type
    # The MIT License (MIT), Copyright (c), 2018-2021, SISSA (Scuola Internazionale Superiore di Studi Avanzati)
    # https://github.com/sissaschool/elementpath/blob/dfcc2fd3d6011b16e02bf30459a7924f547b47d0/elementpath/xpath_tokens.py#L1038
    """

    import elementpath
    from decimal import Decimal
    import math

    if obj is None:
        return ''
    # https://elementpath.readthedocs.io/en/latest/xpath_api.html#elementpath.select
    elif isinstance(obj, elementpath.XPathNode):
        return obj.string_value
    elif isinstance(obj, bool):
        return 'true' if obj else 'false'
    elif isinstance(obj, Decimal):
        value = format(obj, 'f')
        if '.' in value:
            return value.rstrip('0').rstrip('.')
        return value

    elif isinstance(obj, float):
        if math.isnan(obj):
            return 'NaN'
        elif math.isinf(obj):
            return str(obj).upper()

        value = str(obj)
        if '.' in value:
            value = value.rstrip('0').rstrip('.')
        if '+' in value:
            value = value.replace('+', '')
        if 'e' in value:
            return value.upper()
        return value

    return str(obj)

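For orientation, what the conversion rules above yield for the main result types (values follow directly from the branches shown):

    # Illustrative only - exercising elementpath_tostring() from the diff above
    from decimal import Decimal

    print(elementpath_tostring(None))             # ''
    print(elementpath_tostring(True))             # 'true'
    print(elementpath_tostring(Decimal('1.50')))  # '1.5'  (trailing zeros stripped)
    print(elementpath_tostring(float('inf')))     # 'INF'
    print(elementpath_tostring(2.0))              # '2'
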
# Return str Utf-8 of matched rules
def xpath_filter(xpath_filter, html_content, append_pretty_line_formatting=False):
def xpath_filter(xpath_filter, html_content, append_pretty_line_formatting=False, is_rss=False):
    from lxml import etree, html
    import elementpath
    # xpath 2.0-3.1
    from elementpath.xpath3 import XPath3Parser

    parser = etree.HTMLParser()
    if is_rss:
        # So that we can keep CDATA for cdata_in_document_to_text() to process
        parser = etree.XMLParser(strip_cdata=False)

    tree = html.fromstring(bytes(html_content, encoding='utf-8'), parser=parser)
    html_block = ""

    r = elementpath.select(tree, xpath_filter.strip(), namespaces={'re': 'http://exslt.org/regular-expressions'}, parser=XPath3Parser)
    #@note: //title/text() wont work where <title>CDATA..

    if type(r) != list:
        r = [r]

    for element in r:
        # When there's more than 1 match, then add the suffix to separate each line
        # And where the matched result doesn't include something that will cause Inscriptis to add a newline
        # (This way each 'match' reliably has a new-line in the diff)
        # Divs are converted to 4 whitespaces by inscriptis
        if append_pretty_line_formatting and len(html_block) and (not hasattr(element, 'tag') or not element.tag in (['br', 'hr', 'div', 'p'])):
            html_block += TEXT_FILTER_LIST_LINE_SUFFIX

        if type(element) == str:
            html_block += element
        elif issubclass(type(element), etree._Element) or issubclass(type(element), etree._ElementTree):
            html_block += etree.tostring(element, pretty_print=True).decode('utf-8')
        else:
            html_block += elementpath_tostring(element)

    return html_block

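A hedged usage sketch of xpath_filter (the HTML snippet is made up; exact output depends on how elementpath returns text nodes, but both branches above reduce to the text):

    # Illustrative call against a small in-memory document
    doc = '<html><body><div class="price">10.99</div><div class="price">12.50</div></body></html>'
    print(xpath_filter('//div[@class="price"]/text()', doc))
    # -> roughly '10.9912.50'; with append_pretty_line_formatting=True each match
    #    is separated by TEXT_FILTER_LIST_LINE_SUFFIX so it diffs line-by-line
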
# Return str Utf-8 of matched rules
# 'xpath1:'
def xpath1_filter(xpath_filter, html_content, append_pretty_line_formatting=False, is_rss=False):
    from lxml import etree, html

    tree = html.fromstring(bytes(html_content, encoding='utf-8'))
    parser = None
    if is_rss:
        # So that we can keep CDATA for cdata_in_document_to_text() to process
        parser = etree.XMLParser(strip_cdata=False)

    tree = html.fromstring(bytes(html_content, encoding='utf-8'), parser=parser)
    html_block = ""

    r = tree.xpath(xpath_filter.strip(), namespaces={'re': 'http://exslt.org/regular-expressions'})
@@ -94,7 +181,6 @@ def xpath_filter(xpath_filter, html_content, append_pretty_line_formatting=False

    return html_block


# Extract/find element
def extract_element(find='title', html_content=''):

@@ -260,8 +346,15 @@ def strip_ignore_text(content, wordlist, mode="content"):

    return "\n".encode('utf8').join(output)

def cdata_in_document_to_text(html_content: str, render_anchor_tag_content=False) -> str:
    pattern = r'<!\[CDATA\[(\s*(?:.(?<!\]\]>)\s*)*)\]\]>'
    def repl(m):
        text = m.group(1)
        return xml_escape(html_to_text(html_content=text)).strip()

def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:
    return re.sub(pattern, repl, html_content)

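A small sketch of the CDATA conversion (assumes it runs inside the same html_tools module so html_to_text is available; the RSS snippet is made up):

    # Illustrative input; the CDATA body is reduced to escaped plain text
    rss_snippet = '<description><![CDATA[<p>Price <b>drop</b>!</p>]]></description>'
    print(cdata_in_document_to_text(html_content=rss_snippet))
    # -> roughly '<description>Price drop!</description>'
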
def html_to_text(html_content: str, render_anchor_tag_content=False, is_rss=False) -> str:
    """Converts an HTML string to a string with just the text. If rendering
    anchor tag content is enabled, anchor tag content is also
    included in the text
@@ -277,16 +370,21 @@ def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:
    # if anchor tag content flag is set to True define a config for
    # extracting this content
    if render_anchor_tag_content:

        parser_config = ParserConfig(
            annotation_rules={"a": ["hyperlink"]}, display_links=True
            annotation_rules={"a": ["hyperlink"]},
            display_links=True
        )

    # otherwise set config to None
    # otherwise set config to None/default
    else:
        parser_config = None

    # get text and annotations via inscriptis
    # RSS Mode - Inscriptis will treat `title` as something else.
    # Make it a regular block display element (//item/title)
    # This is a bit of a hack - the real way is to use XSLT to convert it to HTML #1874
    if is_rss:
        html_content = re.sub(r'<title([\s>])', r'<h1\1', html_content)
        html_content = re.sub(r'</title>', r'</h1>', html_content)

    text_content = get_text(html_content, config=parser_config)

    return text_content

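As a hedged illustration of the RSS title workaround (input made up; exact whitespace depends on Inscriptis):

    # The <title> -> <h1> rewrite makes RSS titles render as block elements
    rss = '<rss><channel><item><title>New release</title></item></channel></rss>'
    print(html_to_text(html_content=rss, is_rss=True))
    # -> 'New release' on its own line
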
@@ -1,6 +1,9 @@
from abc import ABC, abstractmethod
import time
import validators
from wtforms import ValidationError

from changedetectionio.forms import validate_url


class Importer():
@@ -12,6 +15,7 @@ class Importer():
        self.new_uuids = []
        self.good = 0
        self.remaining_data = []
        self.import_profile = None

    @abstractmethod
    def run(self,
@@ -132,3 +136,167 @@ class import_distill_io_json(Importer):
                good += 1

        flash("{} Imported from Distill.io in {:.2f}s, {} Skipped.".format(len(self.new_uuids), time.time() - now, len(self.remaining_data)))


class import_xlsx_wachete(Importer):

    def run(self,
            data,
            flash,
            datastore,
            ):

        good = 0
        now = time.time()
        self.new_uuids = []

        from openpyxl import load_workbook

        try:
            wb = load_workbook(data)
        except Exception as e:
            # @todo correct except
            flash("Unable to read export XLSX file, something wrong with the file?", 'error')
            return

        row_id = 2
        for row in wb.active.iter_rows(min_row=row_id):
            try:
                extras = {}
                data = {}
                for cell in row:
                    if not cell.value:
                        continue
                    column_title = wb.active.cell(row=1, column=cell.column).value.strip().lower()
                    data[column_title] = cell.value

                # Forced switch to webdriver/playwright/etc
                dynamic_wachet = str(data.get('dynamic wachet', '')).strip().lower()  # Convert bool to str to cover all cases
                # libreoffice and others can have it as =FALSE() =TRUE(), or bool(true)
                if 'true' in dynamic_wachet or dynamic_wachet == '1':
                    extras['fetch_backend'] = 'html_webdriver'
                elif 'false' in dynamic_wachet or dynamic_wachet == '0':
                    extras['fetch_backend'] = 'html_requests'

                if data.get('xpath'):
                    # @todo split by || ?
                    extras['include_filters'] = [data.get('xpath')]
                if data.get('name'):
                    extras['title'] = data.get('name').strip()
                if data.get('interval (min)'):
                    minutes = int(data.get('interval (min)'))
                    hours, minutes = divmod(minutes, 60)
                    days, hours = divmod(hours, 24)
                    weeks, days = divmod(days, 7)
                    extras['time_between_check'] = {'weeks': weeks, 'days': days, 'hours': hours, 'minutes': minutes, 'seconds': 0}

                # At minimum a URL is required.
                if data.get('url'):
                    try:
                        validate_url(data.get('url'))
                    except ValidationError as e:
                        print(">> import URL error", data.get('url'), str(e))
                        flash(f"Error processing row number {row_id}, URL value was incorrect, row was skipped.", 'error')
                        # Don't bother processing anything else on this row
                        continue

                    new_uuid = datastore.add_watch(url=data['url'].strip(),
                                                   extras=extras,
                                                   tag=data.get('folder'),
                                                   write_to_disk_now=False)
                    if new_uuid:
                        # Straight into the queue.
                        self.new_uuids.append(new_uuid)
                        good += 1
            except Exception as e:
                print(e)
                flash(f"Error processing row number {row_id}, check all cell data types are correct, row was skipped.", 'error')
            else:
                row_id += 1

        flash(
            "{} imported from Wachete .xlsx in {:.2f}s".format(len(self.new_uuids), time.time() - now))
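The interval conversion above is plain divmod arithmetic; a worked example (numbers illustrative):

    # 10230 minutes -> 1 week, 0 days, 2 hours, 30 minutes
    minutes = 10230
    hours, minutes = divmod(minutes, 60)   # 170 h, 30 min
    days, hours = divmod(hours, 24)        # 7 d, 2 h
    weeks, days = divmod(days, 7)          # 1 w, 0 d
    print(weeks, days, hours, minutes)     # 1 0 2 30
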


class import_xlsx_custom(Importer):

    def run(self,
            data,
            flash,
            datastore,
            ):

        good = 0
        now = time.time()
        self.new_uuids = []

        from openpyxl import load_workbook

        try:
            wb = load_workbook(data)
        except Exception as e:
            # @todo correct except
            flash("Unable to read export XLSX file, something wrong with the file?", 'error')
            return

        # @todo check at least 2 rows, same in other method
        from .forms import validate_url
        row_i = 1

        try:
            for row in wb.active.iter_rows():
                url = None
                tags = None
                extras = {}

                for cell in row:
                    if not self.import_profile.get(cell.col_idx):
                        continue
                    if not cell.value:
                        continue

                    cell_map = self.import_profile.get(cell.col_idx)

                    cell_val = str(cell.value).strip()  # could be bool

                    if cell_map == 'url':
                        url = cell.value.strip()
                        try:
                            validate_url(url)
                        except ValidationError as e:
                            print(">> Import URL error", url, str(e))
                            flash(f"Error processing row number {row_i}, URL value was incorrect, row was skipped.", 'error')
                            # Don't bother processing anything else on this row
                            url = None
                            break
                    elif cell_map == 'tag':
                        tags = cell.value.strip()
                    elif cell_map == 'include_filters':
                        # @todo validate?
                        extras['include_filters'] = [cell.value.strip()]
                    elif cell_map == 'interval_minutes':
                        hours, minutes = divmod(int(cell_val), 60)
                        days, hours = divmod(hours, 24)
                        weeks, days = divmod(days, 7)
                        extras['time_between_check'] = {'weeks': weeks, 'days': days, 'hours': hours, 'minutes': minutes, 'seconds': 0}
                    else:
                        extras[cell_map] = cell_val

                # At minimum a URL is required.
                if url:
                    new_uuid = datastore.add_watch(url=url,
                                                   extras=extras,
                                                   tag=tags,
                                                   write_to_disk_now=False)
                    if new_uuid:
                        # Straight into the queue.
                        self.new_uuids.append(new_uuid)
                        good += 1
        except Exception as e:
            print(e)
            flash(f"Error processing row number {row_i}, check all cell data types are correct, row was skipped.", 'error')
        else:
            row_i += 1

        flash(
            "{} imported from custom .xlsx in {:.2f}s".format(len(self.new_uuids), time.time() - now))

@@ -16,6 +16,7 @@ class model(dict):
            },
            'requests': {
                'extra_proxies': [],  # Configurable extra proxies via the UI
                'extra_browsers': [],  # Configurable extra browsers via the UI
                'jitter_seconds': 0,
                'proxy': None,  # Preferred proxy connection
                'time_between_check': {'weeks': None, 'days': None, 'hours': 3, 'minutes': None, 'seconds': None},

@@ -4,6 +4,7 @@ import os
import re
import time
import uuid
from pathlib import Path

# Allowable protocols, protects against javascript: etc
# file:// is further checked by ALLOW_FILE_URI
@@ -18,6 +19,8 @@ from changedetectionio.notification import (

base_config = {
    'body': None,
    'browser_steps': [],
    'browser_steps_last_error_step': None,
    'check_unique_lines': False,  # On change-detected, compare against all history if its something new
    'check_count': 0,
    'date_created': None,
@@ -25,6 +28,7 @@ base_config = {
    'extract_text': [],  # Extract text by regex after filters
    'extract_title_as_title': False,
    'fetch_backend': 'system',  # plaintext, playwright etc
    'fetch_time': 0.0,
    'processor': 'text_json_diff',  # could be restock_diff or others from .processors
    'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
    'filter_text_added': True,
@@ -34,6 +38,7 @@ base_config = {
    'track_ldjson_price_data': None,
    'headers': {},  # Extra headers to send
    'ignore_text': [],  # List of text to ignore when calculating the comparison checksum
    'in_stock': None,
    'in_stock_only': True,  # Only trigger change on going to instock from out-of-stock
    'include_filters': [],
    'last_checked': 0,
@@ -109,7 +114,8 @@ class model(dict):

    @property
    def viewed(self):
        if int(self['last_viewed']) >= int(self.newest_history_key):
        # Don't return viewed when last_viewed is 0 and newest_key is 0
        if int(self['last_viewed']) and int(self['last_viewed']) >= int(self.newest_history_key):
            return True

        return False
@@ -142,8 +148,14 @@ class model(dict):
            flash(message, 'error')
            return ''

        if ready_url.startswith('source:'):
            ready_url = ready_url.replace('source:', '')
        return ready_url

    @property
    def is_source_type_url(self):
        return self.get('url', '').startswith('source:')

    @property
    def get_fetch_backend(self):
        """
@@ -167,9 +179,7 @@ class model(dict):
    @property
    def label(self):
        # Used for sorting
        if self['title']:
            return self['title']
        return self['url']
        return self.get('title') if self.get('title') else self.get('url')

    @property
    def last_changed(self):
@@ -233,6 +243,14 @@ class model(dict):
        fname = os.path.join(self.watch_data_dir, "history.txt")
        return os.path.isfile(fname)

    @property
    def has_browser_steps(self):
        has_browser_steps = self.get('browser_steps') and list(filter(
            lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
            self.get('browser_steps')))

        return has_browser_steps

    # Returns the newest key, but if there's only 1 record, then it's counted as not being new, so return 0.
    @property
    def newest_history_key(self):
@@ -246,6 +264,38 @@ class model(dict):
        bump = self.history
        return self.__newest_history_key

    # Given an arbitrary timestamp, find the closest next key
    # For example, last_viewed = 1000 so it should return the next 1001 timestamp
    #
    # used for the [diff] button so it can preset a smarter from_version
    @property
    def get_next_snapshot_key_to_last_viewed(self):

        """Unfortunately for now timestamp is stored as string key"""
        keys = list(self.history.keys())
        if not keys:
            return None

        last_viewed = int(self.get('last_viewed'))
        prev_k = keys[0]
        sorted_keys = sorted(keys, key=lambda x: int(x))
        sorted_keys.reverse()

        # When the 'last viewed' timestamp is greater than the newest snapshot, return second last
        if last_viewed > int(sorted_keys[0]):
            return sorted_keys[1]

        for k in sorted_keys:
            if int(k) < last_viewed:
                if prev_k == sorted_keys[0]:
                    # Return the second last one so we dont recommend the same version compares itself
                    return sorted_keys[1]

                return prev_k
            prev_k = k

        return keys[0]
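A quick trace of the key selection above with made-up timestamps:

    # history keys: '1000', '1010', '1020'; last_viewed = 1005
    # sorted newest-first -> ['1020', '1010', '1000']
    # '1020' and '1010' are not < 1005; '1000' is, and prev_k is '1010'
    # -> returns '1010', the first snapshot taken after last_viewed
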

    def get_history_snapshot(self, timestamp):
        import brotli
        filepath = self.history[timestamp]
@@ -491,3 +541,13 @@ class model(dict):
        filepath = os.path.join(self.watch_data_dir, 'last-fetched.br')
        with open(filepath, 'wb') as f:
            f.write(brotli.compress(contents, mode=brotli.MODE_TEXT))

    @property
    def get_browsersteps_available_screenshots(self):
        "For knowing which screenshots are available to show the user in BrowserSteps UI"
        available = []
        for f in Path(self.watch_data_dir).glob('step_before-*.jpeg'):
            step_n = re.search(r'step_before-(\d+)', f.name)
            if step_n:
                available.append(step_n.group(1))
        return available

@@ -46,6 +46,9 @@ from apprise.decorators import notify
@notify(on="puts")
def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs):
    import requests
    from apprise.utils import parse_url as apprise_parse_url
    from apprise.URLBase import URLBase

    url = kwargs['meta'].get('url')

    if url.startswith('post'):
@@ -68,16 +71,45 @@ def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs):
    url = url.replace('delete://', 'http://')
    url = url.replace('deletes://', 'https://')

    # Try to auto-guess if it's JSON
    headers = {}
    params = {}
    auth = None

    # Convert /foobar?+some-header=hello to proper header dictionary
    results = apprise_parse_url(url)
    if results:
        # Add our headers that the user can potentially over-ride if they wish
        # to our returned result set, and tidy entries by unquoting them
        headers = {URLBase.unquote(x): URLBase.unquote(y)
                   for x, y in results['qsd+'].items()}

        # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
        # Apprise relies on prefixing each request arg with "-", because it uses e.g. &method=update as a flag for apprise itself,
        # but here we are making straight requests, so we need to convert this against apprise's logic
        for k, v in results['qsd'].items():
            if not k.strip('+-') in results['qsd+'].keys():
                params[URLBase.unquote(k)] = URLBase.unquote(v)

        # Determine Authentication
        auth = ''
        if results.get('user') and results.get('password'):
            auth = (URLBase.unquote(results.get('user')), URLBase.unquote(results.get('password')))
        elif results.get('user'):
            auth = (URLBase.unquote(results.get('user')))

    # Try to auto-guess if it's JSON
    try:
        json.loads(body)
        headers = {'Content-Type': 'application/json; charset=utf-8'}
        headers['Content-Type'] = 'application/json; charset=utf-8'
    except ValueError as e:
        pass


    r(url, headers=headers, data=body)
    r(results.get('url'),
      auth=auth,
      data=body,
      headers=headers,
      params=params
      )
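As a hedged illustration of the '+header' / plain-param convention handled above (URL and values made up; the exact split follows the qsd/qsd+ handling shown in the code):

    # post://api.example.com/notify?+x-api-key=abc123&foo=bar  with user:pass in the URL
    #   headers -> {'x-api-key': 'abc123'}   (query args prefixed with '+')
    #   params  -> {'foo': 'bar'}            (remaining plain query args)
    #   auth    -> ('user', 'pass')          (basic auth from the URL credentials)
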


def process_notification(n_object, datastore):
@@ -189,13 +221,14 @@ def process_notification(n_object, datastore):


# Notification title + body content parameters get created here.
# ( Where we prepare the tokens in the notification to be replaced with actual values )
def create_notification_parameters(n_object, datastore):
    from copy import deepcopy

    # in the case we send a test notification from the main settings, there is no UUID.
    uuid = n_object['uuid'] if 'uuid' in n_object else ''

    if uuid != '':
    if uuid:
        watch_title = datastore.data['watching'][uuid].get('title', '')
        tag_list = []
        tags = datastore.get_all_tags_for_watch(uuid)
@@ -223,7 +256,7 @@ def create_notification_parameters(n_object, datastore):
    tokens.update(
        {
            'base_url': base_url,
            'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else '',
            'current_snapshot': n_object.get('current_snapshot', ''),
            'diff': n_object.get('diff', ''),  # Null default in the case we use a test
            'diff_added': n_object.get('diff_added', ''),  # Null default in the case we use a test
            'diff_full': n_object.get('diff_full', ''),  # Null default in the case we use a test

@@ -1,15 +1,127 @@
from abc import abstractmethod
import os
import hashlib

import re
from changedetectionio import content_fetcher
from copy import deepcopy
from distutils.util import strtobool

class difference_detection_processor():

    browser_steps = None
    datastore = None
    fetcher = None
    screenshot = None
    watch = None
    xpath_data = None

    def __init__(self, *args, **kwargs):
    def __init__(self, *args, datastore, watch_uuid, **kwargs):
        super().__init__(*args, **kwargs)
        self.datastore = datastore
        self.watch = deepcopy(self.datastore.data['watching'].get(watch_uuid))

    def call_browser(self):

        # Protect against file:// access
        if re.search(r'^file://', self.watch.get('url', '').strip(), re.IGNORECASE):
            if not strtobool(os.getenv('ALLOW_FILE_URI', 'false')):
                raise Exception(
                    "file:// type access is denied for security reasons."
                )

        url = self.watch.link

        # Requests, playwright, other browser via wss:// etc, fetch_extra_something
        prefer_fetch_backend = self.watch.get('fetch_backend', 'system')

        # Proxy ID "key"
        preferred_proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=self.watch.get('uuid'))

        # Pluggable content self.fetcher
        if not prefer_fetch_backend or prefer_fetch_backend == 'system':
            prefer_fetch_backend = self.datastore.data['settings']['application'].get('fetch_backend')

        # In the case that the preferred fetcher was a browser config with a custom connection URL..
        # @todo - on save watch, if it's extra_browser_ then it should be obvious it will use playwright (like if it's requests now..)
        custom_browser_connection_url = None
        if prefer_fetch_backend.startswith('extra_browser_'):
            (t, key) = prefer_fetch_backend.split('extra_browser_')
            connection = list(
                filter(lambda s: (s['browser_name'] == key), self.datastore.data['settings']['requests'].get('extra_browsers', [])))
            if connection:
                prefer_fetch_backend = 'base_html_playwright'
                custom_browser_connection_url = connection[0].get('browser_connection_url')

        # PDF should be html_requests because playwright will serve it up (so far) in an embedded page
        # @todo https://github.com/dgtlmoon/changedetection.io/issues/2019
        # @todo needs a test or a fix
        if self.watch.is_pdf:
            prefer_fetch_backend = "html_requests"

        # Grab the right kind of 'fetcher', (playwright, requests, etc)
        if hasattr(content_fetcher, prefer_fetch_backend):
            fetcher_obj = getattr(content_fetcher, prefer_fetch_backend)
        else:
            # If the klass doesn't exist, just use a default
            fetcher_obj = getattr(content_fetcher, "html_requests")


        proxy_url = None
        if preferred_proxy_id:
            proxy_url = self.datastore.proxy_list.get(preferred_proxy_id).get('url')
            print(f"Using proxy Key: {preferred_proxy_id} as Proxy URL {proxy_url}")

        # Now call the fetcher (playwright/requests/etc) with arguments that only a fetcher would need.
        # When browser_connection_url is None, the method should default to working out the best defaults (os env vars etc)
        self.fetcher = fetcher_obj(proxy_override=proxy_url,
                                   custom_browser_connection_url=custom_browser_connection_url
                                   )

        if self.watch.has_browser_steps:
            self.fetcher.browser_steps = self.watch.get('browser_steps', [])
            self.fetcher.browser_steps_screenshot_path = os.path.join(self.datastore.datastore_path, self.watch.get('uuid'))

        # Tweak the base config with the per-watch ones
        request_headers = self.watch.get('headers', [])
        request_headers.update(self.datastore.get_all_base_headers())
        request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=self.watch.get('uuid')))

        # https://github.com/psf/requests/issues/4525
        # Requests doesn't yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
        # do this by accident.
        if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
            request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')

        timeout = self.datastore.data['settings']['requests'].get('timeout')

        request_body = self.watch.get('body')
        request_method = self.watch.get('method')
        ignore_status_codes = self.watch.get('ignore_status_codes', False)

        # Configurable per-watch or global extra delay before extracting text (for webDriver types)
        system_webdriver_delay = self.datastore.data['settings']['application'].get('webdriver_delay', None)
        if self.watch.get('webdriver_delay'):
            self.fetcher.render_extract_delay = self.watch.get('webdriver_delay')
        elif system_webdriver_delay is not None:
            self.fetcher.render_extract_delay = system_webdriver_delay

        if self.watch.get('webdriver_js_execute_code') is not None and self.watch.get('webdriver_js_execute_code').strip():
            self.fetcher.webdriver_js_execute_code = self.watch.get('webdriver_js_execute_code')

        # Requests for PDFs, images etc should be passed the is_binary flag
        is_binary = self.watch.is_pdf

        # And here we go! call the right browser with browser-specific settings
        self.fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, self.watch.get('include_filters'),
                         is_binary=is_binary)

        #@todo .quit here could go on close object, so we can run JS if change-detected
        self.fetcher.quit()

        # After init, call run_changedetection() which will do the actual change-detection

    @abstractmethod
    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
    def run_changedetection(self, uuid, skip_when_checksum_same=True):
        update_obj = {'last_notification_error': False, 'last_error': False}
        some_data = 'xxxxx'
        update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest()
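A hedged sketch of how a processor subclass plugs into the new base-class contract above (the comparison logic is made up; call_browser() populates self.fetcher first):

    # Hypothetical subclass following the new contract
    class my_processor(difference_detection_processor):
        def run_changedetection(self, uuid, skip_when_checksum_same=True):
            update_obj = {'last_error': False}
            text = self.fetcher.content                      # populated by call_browser()
            fetched_md5 = hashlib.md5(text.encode('utf-8')).hexdigest()
            changed_detected = fetched_md5 != self.watch.get('previous_md5')
            update_obj['previous_md5'] = fetched_md5
            return changed_detected, update_obj, text.encode('utf-8')

    # Hypothetical caller side, per the new __init__(datastore, watch_uuid) signature:
    # p = my_processor(datastore=datastore, watch_uuid=uuid)
    # p.call_browser()
    # changed, update_obj, snapshot = p.run_changedetection(uuid)
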

@@ -1,10 +1,7 @@

import hashlib
import os
import re
import urllib3
from . import difference_detection_processor
from changedetectionio import content_fetcher
from copy import deepcopy

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@@ -22,11 +19,7 @@ class perform_site_check(difference_detection_processor):
    screenshot = None
    xpath_data = None

    def __init__(self, *args, datastore, **kwargs):
        super().__init__(*args, **kwargs)
        self.datastore = datastore

    def run(self, uuid, skip_when_checksum_same=True):
    def run_changedetection(self, uuid, skip_when_checksum_same=True):

        # DeepCopy so we can be sure we don't accidentally change anything by reference
        watch = deepcopy(self.datastore.data['watching'].get(uuid))
@@ -34,84 +27,24 @@ class perform_site_check(difference_detection_processor):
        if not watch:
            raise Exception("Watch no longer exists.")

        # Protect against file:// access
        if re.search(r'^file', watch.get('url', ''), re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False):
            raise Exception(
                "file:// type access is denied for security reasons."
            )

        # Unset any existing notification error
        update_obj = {'last_notification_error': False, 'last_error': False}

        request_headers = watch.get('headers', [])
        request_headers.update(self.datastore.get_all_base_headers())
        request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=uuid))

        # https://github.com/psf/requests/issues/4525
        # Requests doesn't yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
        # do this by accident.
        if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
            request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')

        timeout = self.datastore.data['settings']['requests'].get('timeout')

        url = watch.link

        request_body = self.datastore.data['watching'][uuid].get('body')
        request_method = self.datastore.data['watching'][uuid].get('method')
        ignore_status_codes = self.datastore.data['watching'][uuid].get('ignore_status_codes', False)

        # Pluggable content fetcher
        prefer_backend = watch.get_fetch_backend
        if not prefer_backend or prefer_backend == 'system':
            prefer_backend = self.datastore.data['settings']['application']['fetch_backend']

        if hasattr(content_fetcher, prefer_backend):
            klass = getattr(content_fetcher, prefer_backend)
        else:
            # If the klass doesn't exist, just use a default
            klass = getattr(content_fetcher, "html_requests")

        proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
        proxy_url = None
        if proxy_id:
            proxy_url = self.datastore.proxy_list.get(proxy_id).get('url')
            print("UUID {} Using proxy {}".format(uuid, proxy_url))

        fetcher = klass(proxy_override=proxy_url)

        # Configurable per-watch or global extra delay before extracting text (for webDriver types)
        system_webdriver_delay = self.datastore.data['settings']['application'].get('webdriver_delay', None)
        if watch['webdriver_delay'] is not None:
            fetcher.render_extract_delay = watch.get('webdriver_delay')
        elif system_webdriver_delay is not None:
            fetcher.render_extract_delay = system_webdriver_delay

        # Could be removed if requests/plaintext could also return some info?
        if prefer_backend != 'html_webdriver':
            raise Exception("Re-stock detection requires Chrome or compatible webdriver/playwright fetcher to work")

        if watch.get('webdriver_js_execute_code') is not None and watch.get('webdriver_js_execute_code').strip():
            fetcher.webdriver_js_execute_code = watch.get('webdriver_js_execute_code')

        fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'))
        fetcher.quit()

        self.screenshot = fetcher.screenshot
        self.xpath_data = fetcher.xpath_data
        self.screenshot = self.fetcher.screenshot
        self.xpath_data = self.fetcher.xpath_data

        # Track the content type
        update_obj['content_type'] = fetcher.headers.get('Content-Type', '')
        update_obj["last_check_status"] = fetcher.get_last_status_code()
        update_obj['content_type'] = self.fetcher.headers.get('Content-Type', '')
        update_obj["last_check_status"] = self.fetcher.get_last_status_code()

        # Main detection method
        fetched_md5 = None
        if fetcher.instock_data:
            fetched_md5 = hashlib.md5(fetcher.instock_data.encode('utf-8')).hexdigest()
        if self.fetcher.instock_data:
            fetched_md5 = hashlib.md5(self.fetcher.instock_data.encode('utf-8')).hexdigest()
            # 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold.
            update_obj["in_stock"] = True if fetcher.instock_data == 'Possibly in stock' else False
            update_obj["in_stock"] = True if self.fetcher.instock_data == 'Possibly in stock' else False
        else:
            raise UnableToExtractRestockData(status_code=fetcher.status_code)
            raise UnableToExtractRestockData(status_code=self.fetcher.status_code)

        # The main thing that all this at the moment comes down to :)
        changed_detected = False
@@ -128,4 +61,4 @@ class perform_site_check(difference_detection_processor):
        # Always record the new checksum
        update_obj["previous_md5"] = fetched_md5

        return changed_detected, update_obj, fetcher.instock_data.encode('utf-8')
        return changed_detected, update_obj, self.fetcher.instock_data.encode('utf-8')

@@ -1,4 +1,4 @@
# HTML to TEXT/JSON DIFFERENCE FETCHER
# HTML to TEXT/JSON DIFFERENCE self.fetcher

import hashlib
import json
@@ -11,7 +11,7 @@ from changedetectionio import content_fetcher, html_tools
from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
from copy import deepcopy
from . import difference_detection_processor
from ..html_tools import PERL_STYLE_REGEX
from ..html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

@@ -32,15 +32,10 @@ class PDFToHTMLToolNotFound(ValueError):
# Some common stuff here that can be moved to a base class
# (set_proxy_from_list)
class perform_site_check(difference_detection_processor):
    screenshot = None
    xpath_data = None

    def __init__(self, *args, datastore, **kwargs):
        super().__init__(*args, **kwargs)
        self.datastore = datastore

    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
    def run_changedetection(self, uuid, skip_when_checksum_same=True):
        changed_detected = False
        html_content = ""
        screenshot = False  # as bytes
        stripped_text_from_html = ""

@@ -49,100 +44,25 @@ class perform_site_check(difference_detection_processor):
        if not watch:
            raise Exception("Watch no longer exists.")

        # Protect against file:// access
        if re.search(r'^file', watch.get('url', ''), re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False):
            raise Exception(
                "file:// type access is denied for security reasons."
            )

        # Unset any existing notification error
        update_obj = {'last_notification_error': False, 'last_error': False}

        # Tweak the base config with the per-watch ones
        request_headers = watch.get('headers', [])
        request_headers.update(self.datastore.get_all_base_headers())
        request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=uuid))

        # https://github.com/psf/requests/issues/4525
        # Requests doesn't yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
        # do this by accident.
        if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
            request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')

        timeout = self.datastore.data['settings']['requests'].get('timeout')

        url = watch.link

        request_body = self.datastore.data['watching'][uuid].get('body')
        request_method = self.datastore.data['watching'][uuid].get('method')
        ignore_status_codes = self.datastore.data['watching'][uuid].get('ignore_status_codes', False)

        # source: support
        is_source = False
        if url.startswith('source:'):
            url = url.replace('source:', '')
            is_source = True

        # Pluggable content fetcher
        prefer_backend = watch.get_fetch_backend
        if not prefer_backend or prefer_backend == 'system':
            prefer_backend = self.datastore.data['settings']['application']['fetch_backend']

        if hasattr(content_fetcher, prefer_backend):
            klass = getattr(content_fetcher, prefer_backend)
        else:
            # If the klass doesn't exist, just use a default
            klass = getattr(content_fetcher, "html_requests")

        if preferred_proxy:
            proxy_id = preferred_proxy
        else:
            proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)

        proxy_url = None
        if proxy_id:
            proxy_url = self.datastore.proxy_list.get(proxy_id).get('url')
            print("UUID {} Using proxy {}".format(uuid, proxy_url))

        fetcher = klass(proxy_override=proxy_url)

        # Configurable per-watch or global extra delay before extracting text (for webDriver types)
        system_webdriver_delay = self.datastore.data['settings']['application'].get('webdriver_delay', None)
        if watch['webdriver_delay'] is not None:
            fetcher.render_extract_delay = watch.get('webdriver_delay')
        elif system_webdriver_delay is not None:
            fetcher.render_extract_delay = system_webdriver_delay

        # Possible conflict
        if prefer_backend == 'html_webdriver':
            fetcher.browser_steps = watch.get('browser_steps', None)
            fetcher.browser_steps_screenshot_path = os.path.join(self.datastore.datastore_path, uuid)

        if watch.get('webdriver_js_execute_code') is not None and watch.get('webdriver_js_execute_code').strip():
            fetcher.webdriver_js_execute_code = watch.get('webdriver_js_execute_code')

        # Requests for PDFs, images etc should be passed the is_binary flag
        is_binary = watch.is_pdf

        fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'),
                    is_binary=is_binary)
        fetcher.quit()

        self.screenshot = fetcher.screenshot
        self.xpath_data = fetcher.xpath_data
        self.screenshot = self.fetcher.screenshot
        self.xpath_data = self.fetcher.xpath_data

        # Track the content type
        update_obj['content_type'] = fetcher.get_all_headers().get('content-type', '').lower()
        update_obj['content_type'] = self.fetcher.get_all_headers().get('content-type', '').lower()

        # Watches added automatically in the queue manager will skip if it's the same checksum as the previous run
        # Saves a lot of CPU
        update_obj['previous_md5_before_filters'] = hashlib.md5(fetcher.content.encode('utf-8')).hexdigest()
        update_obj['previous_md5_before_filters'] = hashlib.md5(self.fetcher.content.encode('utf-8')).hexdigest()
        if skip_when_checksum_same:
            if update_obj['previous_md5_before_filters'] == watch.get('previous_md5_before_filters'):
                raise content_fetcher.checksumFromPreviousCheckWasTheSame()

        # Fetching complete, now filters
        # @todo move to class / maybe inside of fetcher abstract base?

        # @note: I feel like the following should be in a more obvious chain system
        # - Check filter text
@@ -151,15 +71,24 @@ class perform_site_check(difference_detection_processor):
        # https://stackoverflow.com/questions/41817578/basic-method-chaining ?
        # return content().textfilter().jsonextract().checksumcompare() ?

        is_json = 'application/json' in fetcher.get_all_headers().get('content-type', '').lower()
        is_json = 'application/json' in self.fetcher.get_all_headers().get('content-type', '').lower()
        is_html = not is_json
        is_rss = False

        ctype_header = self.fetcher.get_all_headers().get('content-type', '').lower()
        # Go into RSS preprocess for converting CDATA/comment to usable text
        if any(substring in ctype_header for substring in ['application/xml', 'application/rss', 'text/xml']):
            if '<rss' in self.fetcher.content[:100].lower():
                self.fetcher.content = cdata_in_document_to_text(html_content=self.fetcher.content)
                is_rss = True

        # source: support, basically treat it as plaintext
        if is_source:
        if watch.is_source_type_url:
            is_html = False
            is_json = False

        if watch.is_pdf or 'application/pdf' in fetcher.get_all_headers().get('content-type', '').lower():
        inline_pdf = self.fetcher.get_all_headers().get('content-disposition', '') and '%PDF-1' in self.fetcher.content[:10]
        if watch.is_pdf or 'application/pdf' in self.fetcher.get_all_headers().get('content-type', '').lower() or inline_pdf:
            from shutil import which
            tool = os.getenv("PDF_TO_HTML_TOOL", "pdftohtml")
            if not which(tool):
@@ -170,18 +99,18 @@ class perform_site_check(difference_detection_processor):
                [tool, '-stdout', '-', '-s', 'out.pdf', '-i'],
                stdout=subprocess.PIPE,
                stdin=subprocess.PIPE)
            proc.stdin.write(fetcher.raw_content)
            proc.stdin.write(self.fetcher.raw_content)
            proc.stdin.close()
            fetcher.content = proc.stdout.read().decode('utf-8')
            self.fetcher.content = proc.stdout.read().decode('utf-8')
            proc.wait(timeout=60)

            # Add a little metadata so we know if the file changes (like if an image changes, but the text is the same)
            # @todo may cause problems with non-UTF8?
            metadata = "<p>Added by changedetection.io: Document checksum - {} Filesize - {} bytes</p>".format(
                hashlib.md5(fetcher.raw_content).hexdigest().upper(),
                len(fetcher.content))
                hashlib.md5(self.fetcher.raw_content).hexdigest().upper(),
                len(self.fetcher.content))

            fetcher.content = fetcher.content.replace('</body>', metadata + '</body>')
            self.fetcher.content = self.fetcher.content.replace('</body>', metadata + '</body>')

        # Better would be if Watch.model could access the global data also
        # and then use getattr https://docs.python.org/3/reference/datamodel.html#object.__getitem__
@@ -208,7 +137,7 @@ class perform_site_check(difference_detection_processor):
        if is_json:
            # Sort the JSON so we don't get false alerts when the content is just re-ordered
            try:
                fetcher.content = json.dumps(json.loads(fetcher.content), sort_keys=True)
                self.fetcher.content = json.dumps(json.loads(self.fetcher.content), sort_keys=True)
            except Exception as e:
                # Might have just been a snippet, or otherwise bad JSON, continue
                pass
@@ -216,22 +145,22 @@ class perform_site_check(difference_detection_processor):
        if has_filter_rule:
            for filter in include_filters_rule:
                if any(prefix in filter for prefix in json_filter_prefixes):
                    stripped_text_from_html += html_tools.extract_json_as_string(content=fetcher.content, json_filter=filter)
                    stripped_text_from_html += html_tools.extract_json_as_string(content=self.fetcher.content, json_filter=filter)
                    is_html = False

        if is_html or is_source:
        if is_html or watch.is_source_type_url:

            # CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
            fetcher.content = html_tools.workarounds_for_obfuscations(fetcher.content)
            html_content = fetcher.content
            self.fetcher.content = html_tools.workarounds_for_obfuscations(self.fetcher.content)
            html_content = self.fetcher.content

            # If not JSON, and if it's not text/plain..
            if 'text/plain' in fetcher.get_all_headers().get('content-type', '').lower():
            if 'text/plain' in self.fetcher.get_all_headers().get('content-type', '').lower():
                # Don't run get_text or xpath/css filters on plaintext
                stripped_text_from_html = html_content
            else:
                # Does it have some ld+json price data? used for easier monitoring
                update_obj['has_ldjson_price_data'] = html_tools.has_ldjson_product_info(fetcher.content)
                update_obj['has_ldjson_price_data'] = html_tools.has_ldjson_product_info(self.fetcher.content)

                # Then we assume HTML
                if has_filter_rule:
@@ -241,13 +170,19 @@ class perform_site_check(difference_detection_processor):
                    # For HTML/XML we offer xpath as an option, just start a regular xPath "/.."
                    if filter_rule[0] == '/' or filter_rule.startswith('xpath:'):
                        html_content += html_tools.xpath_filter(xpath_filter=filter_rule.replace('xpath:', ''),
                                                                html_content=fetcher.content,
                                                                append_pretty_line_formatting=not is_source)
                                                                html_content=self.fetcher.content,
                                                                append_pretty_line_formatting=not watch.is_source_type_url,
                                                                is_rss=is_rss)
                    elif filter_rule.startswith('xpath1:'):
                        html_content += html_tools.xpath1_filter(xpath_filter=filter_rule.replace('xpath1:', ''),
                                                                 html_content=self.fetcher.content,
                                                                 append_pretty_line_formatting=not watch.is_source_type_url,
                                                                 is_rss=is_rss)
                    else:
                        # CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
                        html_content += html_tools.include_filters(include_filters=filter_rule,
                                                                   html_content=fetcher.content,
                                                                   append_pretty_line_formatting=not is_source)
                                                                   html_content=self.fetcher.content,
                                                                   append_pretty_line_formatting=not watch.is_source_type_url)

                if not html_content.strip():
                    raise FilterNotFoundInResponse(include_filters_rule)
@@ -255,15 +190,16 @@ class perform_site_check(difference_detection_processor):
                if has_subtractive_selectors:
                    html_content = html_tools.element_removal(subtractive_selectors, html_content)

                if is_source:
                if watch.is_source_type_url:
                    stripped_text_from_html = html_content
                else:
                    # extract text
                    do_anchor = self.datastore.data["settings"]["application"].get("render_anchor_tag_content", False)
                    stripped_text_from_html = \
                        html_tools.html_to_text(
                            html_content,
                            render_anchor_tag_content=do_anchor
                            html_content=html_content,
                            render_anchor_tag_content=do_anchor,
                            is_rss=is_rss  # #1874 activate the <title workaround hack
                        )

        # Re #340 - return the content before the 'ignore text' was applied
@@ -300,7 +236,7 @@ class perform_site_check(difference_detection_processor):
        empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)
        if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0:
            raise content_fetcher.ReplyWithContentButNoText(url=url,
                                                            status_code=fetcher.get_last_status_code(),
                                                            status_code=self.fetcher.get_last_status_code(),
                                                            screenshot=screenshot,
                                                            has_filters=has_filter_rule,
                                                            html_content=html_content
@@ -309,7 +245,7 @@ class perform_site_check(difference_detection_processor):
        # We rely on the actual text in the html output.. many sites have random script vars etc,
        # in the future we'll implement other mechanisms.

        update_obj["last_check_status"] = fetcher.get_last_status_code()
        update_obj["last_check_status"] = self.fetcher.get_last_status_code()

        # If there's text to skip
        # @todo we could abstract out the get_text() to handle this cleaner
@@ -397,7 +333,7 @@ class perform_site_check(difference_detection_processor):
        if is_html:
            if self.datastore.data['settings']['application'].get('extract_title_as_title') or watch['extract_title_as_title']:
                if not watch['title'] or not len(watch['title']):
                    update_obj['title'] = html_tools.extract_element(find='title', html_content=fetcher.content)
                    update_obj['title'] = html_tools.extract_element(find='title', html_content=self.fetcher.content)

        if changed_detected:
            if watch.get('check_unique_lines', False):

@@ -18,6 +18,7 @@ module.exports = async ({page, context}) => {

    await page.setBypassCSP(true)
    await page.setExtraHTTPHeaders(req_headers);

    if (user_agent) {
        await page.setUserAgent(user_agent);
    }
@@ -26,6 +27,10 @@ module.exports = async ({page, context}) => {
    await page.setDefaultNavigationTimeout(0);

    if (proxy_username) {
        // Setting the Proxy-Authentication header is deprecated, and doing so can trigger header change errors from Puppeteer
        // https://github.com/puppeteer/puppeteer/issues/676 ?
        // https://help.brightdata.com/hc/en-us/articles/12632549957649-Proxy-Manager-How-to-Guides#h_01HAKWR4Q0AFS8RZTNYWRDFJC2
        // https://cri.dev/posts/2020-03-30-How-to-solve-Puppeteer-Chrome-Error-ERR_INVALID_ARGUMENT/
        await page.authenticate({
            username: proxy_username,
            password: proxy_password

@@ -1,8 +1,10 @@
function isItemInStock() {
    // @todo Pass these in so the same list can be used in non-JS fetchers
    const outOfStockTexts = [
        ' أخبرني عندما يتوفر',
        '0 in stock',
        'agotado',
        'article épuisé',
        'artikel zurzeit vergriffen',
        'as soon as stock is available',
        'ausverkauft', // sold out
@@ -19,11 +21,14 @@ function isItemInStock() {
        'en rupture de stock',
        'ist derzeit nicht auf lager',
        'item is no longer available',
        'let me know when it\'s available',
        'message if back in stock',
        'nachricht bei',
        'nicht auf lager',
        'nicht lieferbar',
        'nicht zur verfügung',
        'niet beschikbaar',
        'niet leverbaar',
        'no disponible temporalmente',
        'no longer in stock',
        'no tickets available',
@@ -36,13 +41,17 @@ function isItemInStock() {
        'out-of-stock',
        'produkt niedostępny',
        'sold out',
        'sold-out',
        'temporarily out of stock',
        'temporarily unavailable',
        'tickets unavailable',
        'tijdelijk uitverkocht',
        'unavailable tickets',
        'we do not currently have an estimate of when this product will be back in stock.',
        'zur zeit nicht an lager',
        '品切れ',
        '已售完',
        '품절'
    ];


@@ -105,4 +114,4 @@ function isItemInStock() {
}

// returns the element text that makes it think it's out of stock
return isItemInStock();
return isItemInStock();

@@ -170,9 +170,12 @@ if (include_filters.length) {

    try {
        // is it xpath?
        if (f.startsWith('/') || f.startsWith('xpath:')) {
            q = document.evaluate(f.replace('xpath:', ''), document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue;
        if (f.startsWith('/') || f.startsWith('xpath')) {
            var qry_f = f.replace(/xpath(:|\d:)/, '')
            console.log("[xpath] Scanning for included filter " + qry_f)
            q = document.evaluate(qry_f, document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue;
        } else {
            console.log("[css] Scanning for included filter " + f)
            q = document.querySelector(f);
        }
    } catch (e) {
@@ -182,8 +185,18 @@ if (include_filters.length) {
    }

    if (q) {
        // Try to resolve //something/text() back to its /something so we can at least get the bounding box
        try {
            if (typeof q.nodeName == 'string' && q.nodeName === '#text') {
                q = q.parentElement
            }
        } catch (e) {
            console.log(e)
            console.log("xpath_element_scraper: #text resolver")
        }

        // #1231 - In the case an XPath attribute filter is applied, we will have to traverse up and find the element.
        if (q.hasOwnProperty('getBoundingClientRect')) {
        if (typeof q.getBoundingClientRect == 'function') {
            bbox = q.getBoundingClientRect();
            console.log("xpath_element_scraper: Got filter element, scroll from top was " + scroll_y)
        } else {
@@ -192,7 +205,8 @@ if (include_filters.length) {
            bbox = q.ownerElement.getBoundingClientRect();
            console.log("xpath_element_scraper: Got filter by ownerElement element, scroll from top was " + scroll_y)
        } catch (e) {
            console.log("xpath_element_scraper: error looking up ownerElement")
            console.log(e)
            console.log("xpath_element_scraper: error looking up q.ownerElement")
        }
    }
}

44
changedetectionio/run_custom_browser_url_tests.sh
Executable file
@@ -0,0 +1,44 @@
#!/bin/bash

# run some tests and check that the 'custom-browser-search-string=1' connect string appeared in the correct containers

# enable debug
set -x

# An extra browser is configured, but we never chose to use it, so it should NOT show in the logs
docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/custom_browser_url/test_custom_browser_url.py::test_request_not_via_custom_browser_url'
docker logs browserless-custom-url &>log.txt
grep 'custom-browser-search-string=1' log.txt
if [ $? -ne 1 ]
then
  echo "Saw a request in 'browserless-custom-url' container with 'custom-browser-search-string=1' when I should not"
  exit 1
fi

docker logs browserless &>log.txt
grep 'custom-browser-search-string=1' log.txt
if [ $? -ne 1 ]
then
  echo "Saw a request in 'browserless' container with 'custom-browser-search-string=1' when I should not"
  exit 1
fi

# Special connect string should appear in the custom-url container, but not in the 'default' one
docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/custom_browser_url/test_custom_browser_url.py::test_request_via_custom_browser_url'
docker logs browserless-custom-url &>log.txt
grep 'custom-browser-search-string=1' log.txt
if [ $? -ne 0 ]
then
  echo "Did not see request in 'browserless-custom-url' container with 'custom-browser-search-string=1' when I should"
  exit 1
fi

docker logs browserless &>log.txt
grep 'custom-browser-search-string=1' log.txt
if [ $? -ne 1 ]
then
  echo "Saw a request in 'browserless' container with 'custom-browser-search-string=1' when I should not"
  exit 1
fi
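Note: the `if [ $? -ne 1 ]` checks above lean on grep's exit-code convention: 0 when the string is found, 1 when it is not, 2 on error, so anything other than a clean miss fails the test. A rough Python sketch of one of these absence assertions (the container and needle values are just the ones used in the script):

import subprocess

def assert_not_in_logs(container, needle):
    # Equivalent idea: fail both when the string is present and when the check itself errors
    log = subprocess.run(['docker', 'logs', container],
                         capture_output=True, text=True).stdout
    if needle in log:  # corresponds to grep exiting 0
        raise AssertionError(f"Saw {needle!r} in {container!r} when I should not")

assert_not_in_logs('browserless', 'custom-browser-search-string=1')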
@@ -10,6 +10,40 @@ set -x
docker run --network changedet-network -d --name squid-one --hostname squid-one --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf ubuntu/squid:4.13-21.10_edge
docker run --network changedet-network -d --name squid-two --hostname squid-two --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf ubuntu/squid:4.13-21.10_edge

# SOCKS5 related - start simple SOCKS5 proxy server
# SOCKSTEST=xyz should show in the logs of this service to confirm it fetched
docker run --network changedet-network -d --hostname socks5proxy --name socks5proxy -p 1080:1080 -e PROXY_USER=proxy_user123 -e PROXY_PASSWORD=proxy_pass123 serjs/go-socks5-proxy
docker run --network changedet-network -d --hostname socks5proxy-noauth -p 1081:1080 --name socks5proxy-noauth serjs/go-socks5-proxy

echo "---------------------------------- SOCKS5 -------------------"
# SOCKS5 related - test from proxies.json
docker run --network changedet-network \
  -v `pwd`/tests/proxy_socks5/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json \
  --rm \
  -e "SOCKSTEST=proxiesjson" \
  test-changedetectionio \
  bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py'

# SOCKS5 related - by manually entering in UI
docker run --network changedet-network \
  --rm \
  -e "SOCKSTEST=manual" \
  test-changedetectionio \
  bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy.py'

# SOCKS5 related - test from proxies.json via playwright - NOTE: Playwright doesn't support an authenticating proxy
docker run --network changedet-network \
  -e "SOCKSTEST=manual-playwright" \
  -v `pwd`/tests/proxy_socks5/proxies.json-example-noauth:/app/changedetectionio/test-datastore/proxies.json \
  -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" \
  --rm \
  test-changedetectionio \
  bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py'

echo "socks5 server logs"
docker logs socks5proxy
echo "----------------------------------"

# Used for configuring a custom proxy URL via the UI
docker run --network changedet-network -d \
  --name squid-custom \
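Note: the proxies.json-example files mounted above drive the "test from proxies.json" cases, but their contents are not part of this diff. As a rough illustration only, an entry for the authenticated SOCKS5 container started above could look something like this (field names are assumptions for the sketch, not copied from the repository's example files):

import json

# Hypothetical proxies.json-style entry pointing at the socks5proxy container
proxies = {
    "socks5proxy": {
        "label": "socks5proxy",
        "url": "socks5://proxy_user123:proxy_pass123@socks5proxy:1080",
    }
}
print(json.dumps(proxies, indent=4))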
@@ -1,4 +1,4 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg width="15" height="16.363636" viewBox="0 0 15 16.363636" xmlns="http://www.w3.org/2000/svg" xmlns:svg="http://www.w3.org/2000/svg">
<svg width="15" height="16.363636" viewBox="0 0 15 16.363636" xmlns="http://www.w3.org/2000/svg" >
<path d="m 14.318182,11.762045 v 1.1925 H 5.4102273 L 11.849318,7.1140909 C 12.234545,9.1561364 12.54,11.181818 14.318182,11.762045 Z m -6.7984093,4.601591 c 1.0759091,0 2.0256823,-0.955909 2.0256823,-2.045454 H 5.4545455 c 0,1.089545 0.9879545,2.045454 2.0652272,2.045454 z M 15,2.8622727 0.9177273,15.636136 0,14.627045 l 1.8443182,-1.6725 h -1.1625 v -1.1925 C 4.0070455,10.677273 2.1784091,4.5388636 5.3611364,2.6897727 5.8009091,2.4347727 6.0709091,1.9609091 6.0702273,1.4488636 v -0.00205 C 6.0702273,0.64772727 6.7104545,0 7.5,0 8.2895455,0 8.9297727,0.64772727 8.9297727,1.4468182 v 0.00205 C 8.9290909,1.9602319 9.199773,2.4354591 9.638864,2.6897773 10.364318,3.111141 10.827273,3.7568228 11.1525,4.5129591 L 14.085682,1.8531818 Z M 6.8181818,1.3636364 C 6.8181818,1.74 7.1236364,2.0454545 7.5,2.0454545 7.8763636,2.0454545 8.1818182,1.74 8.1818182,1.3636364 8.1818182,0.98795455 7.8763636,0.68181818 7.5,0.68181818 c -0.3763636,0 -0.6818182,0.30613637 -0.6818182,0.68181822 z" id="path2" style="fill:#f8321b;stroke-width:0.681818;fill-opacity:1"/>
</svg>

(image size before: 1.2 KiB, after: 1.2 KiB)
@@ -10,7 +10,7 @@
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
>
<defs
id="defs16" />
<sodipodi:namedview

(image size before: 11 KiB, after: 11 KiB)
@@ -12,7 +12,7 @@
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"><defs
><defs
id="defs11" /><sodipodi:namedview
id="namedview9"
pagecolor="#ffffff"

(image size before: 2.5 KiB, after: 2.5 KiB)
@@ -10,7 +10,7 @@
viewBox="0 0 7.1975545 4.7993639"
xml:space="preserve"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"><defs
><defs
id="defs19" />
<g
id="g14"

(image size before: 1.9 KiB, after: 1.9 KiB)
@@ -9,7 +9,7 @@
id="svg5"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
>
<defs
id="defs2" />
<g

(image size before: 2.4 KiB, after: 2.4 KiB)
@@ -10,7 +10,7 @@
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
>
<defs
id="defs12" />
<sodipodi:namedview

(image size before: 9.7 KiB, after: 9.7 KiB)
@@ -3,7 +3,6 @@
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
version="1.1"
id="Capa_1"

(image size before: 2.9 KiB, after: 2.8 KiB)
@@ -13,7 +13,6 @@
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:dc="http://purl.org/dc/elements/1.1/"><sodipodi:namedview

(image size before: 3.5 KiB, after: 3.4 KiB)
@@ -6,7 +6,7 @@
version="1.1"
id="svg6"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
>
<defs
id="defs10" />
<path

(image size before: 892 B, after: 854 B)
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg width="18" height="19.92" viewBox="0 0 18 19.92" xmlns="http://www.w3.org/2000/svg" xmlns:svg="http://www.w3.org/2000/svg">
<svg width="18" height="19.92" viewBox="0 0 18 19.92" xmlns="http://www.w3.org/2000/svg" >
<path d="M -3,-2 H 21 V 22 H -3 Z" fill="none" id="path2"/>
<path d="m 15,14.08 c -0.76,0 -1.44,0.3 -1.96,0.77 L 5.91,10.7 C 5.96,10.47 6,10.24 6,10 6,9.76 5.96,9.53 5.91,9.3 L 12.96,5.19 C 13.5,5.69 14.21,6 15,6 16.66,6 18,4.66 18,3 18,1.34 16.66,0 15,0 c -1.66,0 -3,1.34 -3,3 0,0.24 0.04,0.47 0.09,0.7 L 5.04,7.81 C 4.5,7.31 3.79,7 3,7 1.34,7 0,8.34 0,10 c 0,1.66 1.34,3 3,3 0.79,0 1.5,-0.31 2.04,-0.81 l 7.12,4.16 c -0.05,0.21 -0.08,0.43 -0.08,0.65 0,1.61 1.31,2.92 2.92,2.92 1.61,0 2.92,-1.31 2.92,-2.92 0,-1.61 -1.31,-2.92 -2.92,-2.92 z" id="path4" style="fill:#0078e7;fill-opacity:1"/>
</svg>

(image size before: 787 B, after: 749 B)
@@ -321,8 +321,14 @@ $(document).ready(function () {
var s = '<div class="control">' + '<a data-step-index=' + i + ' class="pure-button button-secondary button-green button-xsmall apply" >Apply</a> ';
if (i > 0) {
    // The first step never gets these (Goto-site)
    s += '<a data-step-index=' + i + ' class="pure-button button-secondary button-xsmall clear" >Clear</a> ' +
        '<a data-step-index=' + i + ' class="pure-button button-secondary button-red button-xsmall remove" >Remove</a>';
    s += `<a data-step-index="${i}" class="pure-button button-secondary button-xsmall clear" >Clear</a> ` +
        `<a data-step-index="${i}" class="pure-button button-secondary button-red button-xsmall remove" >Remove</a>`;

    // if a screenshot is available
    if (browser_steps_available_screenshots.includes(i.toString())) {
        var d = (browser_steps_last_error_step === i+1) ? 'before' : 'after';
        s += ` <a data-step-index="${i}" class="pure-button button-secondary button-xsmall show-screenshot" title="Show screenshot from last run" data-type="${d}">Pic</a> `;
    }
}
s += '</div>';
$(this).append(s)
@@ -437,6 +443,24 @@ $(document).ready(function () {

});

$('ul#browser_steps li .control .show-screenshot').click(function (element) {
    var step_n = $(event.currentTarget).data('step-index');
    w = window.open(this.href, "_blank", "width=640,height=480");
    const t = $(event.currentTarget).data('type');

    const url = browser_steps_fetch_screenshot_image_url + `&step_n=${step_n}&type=${t}`;
    w.document.body.innerHTML = `<!DOCTYPE html>
<html lang="en">
<body>
<img src="${url}" style="width: 100%" alt="Browser Step at step ${step_n} from last run." title="Browser Step at step ${step_n} from last run."/>
</body>
</html>`;
    w.document.title = `Browser Step at step ${step_n} from last run.`;
});

if (browser_steps_last_error_step) {
    $("ul#browser_steps>li:nth-child("+browser_steps_last_error_step+")").addClass("browser-step-with-error");
}

$("ul#browser_steps select").change(function () {
    set_greyed_state();
@@ -1,110 +1,120 @@
var a = document.getElementById("a");
var b = document.getElementById("b");
var result = document.getElementById("result");
$(document).ready(function () {
    var a = document.getElementById("a");
    var b = document.getElementById("b");
    var result = document.getElementById("result");
    var inputs;

function changed() {
    // https://github.com/kpdecker/jsdiff/issues/389
    // I would love to use `{ignoreWhitespace: true}` here but it breaks the formatting
    options = {
        ignoreWhitespace: document.getElementById("ignoreWhitespace").checked,
    };
    $('#jump-next-diff').click(function () {

    var diff = Diff[window.diffType](a.textContent, b.textContent, options);
    var fragment = document.createDocumentFragment();
    for (var i = 0; i < diff.length; i++) {
        if (diff[i].added && diff[i + 1] && diff[i + 1].removed) {
            var swap = diff[i];
            diff[i] = diff[i + 1];
            diff[i + 1] = swap;
        var element = inputs[inputs.current];
        var headerOffset = 80;
        var elementPosition = element.getBoundingClientRect().top;
        var offsetPosition = elementPosition - headerOffset + window.scrollY;

        window.scrollTo({
            top: offsetPosition,
            behavior: "smooth",
        });

        inputs.current++;
        if (inputs.current >= inputs.length) {
            inputs.current = 0;
        }
    });

    function changed() {
        // https://github.com/kpdecker/jsdiff/issues/389
        // I would love to use `{ignoreWhitespace: true}` here but it breaks the formatting
        options = {
            ignoreWhitespace: document.getElementById("ignoreWhitespace").checked,
        };

        var diff = Diff[window.diffType](a.textContent, b.textContent, options);
        var fragment = document.createDocumentFragment();
        for (var i = 0; i < diff.length; i++) {
            if (diff[i].added && diff[i + 1] && diff[i + 1].removed) {
                var swap = diff[i];
                diff[i] = diff[i + 1];
                diff[i + 1] = swap;
            }

            var node;
            if (diff[i].removed) {
                node = document.createElement("del");
                node.classList.add("change");
                const wrapper = node.appendChild(document.createElement("span"));
                wrapper.appendChild(document.createTextNode(diff[i].value));
            } else if (diff[i].added) {
                node = document.createElement("ins");
                node.classList.add("change");
                const wrapper = node.appendChild(document.createElement("span"));
                wrapper.appendChild(document.createTextNode(diff[i].value));
            } else {
                node = document.createTextNode(diff[i].value);
            }
            fragment.appendChild(node);
        }

        result.textContent = "";
        result.appendChild(fragment);

        // For nice mouse-over hover/title information
        const removed_current_option = $('#diff-version option:selected')
        if (removed_current_option) {
            $('del').each(function () {
                $(this).prop('title', 'Removed '+removed_current_option[0].label);
            });
        }
        const inserted_current_option = $('#current-version option:selected')
        if (removed_current_option) {
            $('ins').each(function () {
                $(this).prop('title', 'Inserted '+inserted_current_option[0].label);
            });
        }
        // Set the list of possible differences to jump to
        inputs = document.querySelectorAll('#diff-ui .change')
        // Set the "current" diff pointer
        inputs.current = 0;
        // Goto diff
        $('#jump-next-diff').click();
    }

        var node;
        if (diff[i].removed) {
            node = document.createElement("del");
            node.classList.add("change");
            const wrapper = node.appendChild(document.createElement("span"));
            wrapper.appendChild(document.createTextNode(diff[i].value));
        } else if (diff[i].added) {
            node = document.createElement("ins");
            node.classList.add("change");
            const wrapper = node.appendChild(document.createElement("span"));
            wrapper.appendChild(document.createTextNode(diff[i].value));
        } else {
            node = document.createTextNode(diff[i].value);
        }
        fragment.appendChild(node);
    }

    result.textContent = "";
    result.appendChild(fragment);

    // Jump at start
    inputs.current = 0;
    next_diff();
}

window.onload = function () {
    /* Convert what is options from UTC time.time() to local browser time */
    var diffList = document.getElementById("diff-version");
    if (typeof diffList != "undefined" && diffList != null) {
        for (var option of diffList.options) {
            var dateObject = new Date(option.value * 1000);
            option.label = dateObject.toLocaleString();
        }
    }

    /* Set current version date as local time in the browser also */
    var current_v = document.getElementById("current-v-date");
    var dateObject = new Date(newest_version_timestamp * 1000);
    current_v.innerHTML = dateObject.toLocaleString();
    onDiffTypeChange(
        document.querySelector('#settings [name="diff_type"]:checked'),
    );
    changed();
};

a.onpaste = a.onchange = b.onpaste = b.onchange = changed;

if ("oninput" in a) {
    a.oninput = b.oninput = changed;
} else {
    a.onkeyup = b.onkeyup = changed;
}

function onDiffTypeChange(radio) {
    window.diffType = radio.value;
    // Not necessary
    // document.title = "Diff " + radio.value.slice(4);
}

var radio = document.getElementsByName("diff_type");
for (var i = 0; i < radio.length; i++) {
    radio[i].onchange = function (e) {
        onDiffTypeChange(e.target);
        $('.needs-localtime').each(function () {
            for (var option of this.options) {
                var dateObject = new Date(option.value * 1000);
                option.label = dateObject.toLocaleString(undefined, {dateStyle: "full", timeStyle: "medium"});
            }
        })
        onDiffTypeChange(
            document.querySelector('#settings [name="diff_type"]:checked'),
        );
        changed();
    };
}

document.getElementById("ignoreWhitespace").onchange = function (e) {
    changed();
};
a.onpaste = a.onchange = b.onpaste = b.onchange = changed;

var inputs = document.getElementsByClassName("change");
inputs.current = 0;
if ("oninput" in a) {
    a.oninput = b.oninput = changed;
} else {
    a.onkeyup = b.onkeyup = changed;
}

function next_diff() {
    var element = inputs[inputs.current];
    var headerOffset = 80;
    var elementPosition = element.getBoundingClientRect().top;
    var offsetPosition = elementPosition - headerOffset + window.scrollY;
function onDiffTypeChange(radio) {
    window.diffType = radio.value;
    // Not necessary
    // document.title = "Diff " + radio.value.slice(4);
}

    window.scrollTo({
        top: offsetPosition,
        behavior: "smooth",
    });
var radio = document.getElementsByName("diff_type");
for (var i = 0; i < radio.length; i++) {
    radio[i].onchange = function (e) {
        onDiffTypeChange(e.target);
        changed();
    };
}

document.getElementById("ignoreWhitespace").onchange = function (e) {
    changed();
};

});

    inputs.current++;
    if (inputs.current >= inputs.length) {
        inputs.current = 0;
    }
}
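Note: the relocated jump handler above scrolls to the current `.change` element, offset by a fixed 80px header, then advances a wrap-around pointer. The pointer arithmetic as a minimal Python sketch:

HEADER_OFFSET = 80

def next_jump(change_tops, current, scroll_y):
    # scroll target mirrors: elementPosition - headerOffset + window.scrollY
    target = change_tops[current] - HEADER_OFFSET + scroll_y
    current = current + 1
    if current >= len(change_tops):   # wrap, like inputs.current in the JS
        current = 0
    return target, current

print(next_jump([120, 560, 900], 0, 0))   # -> (40, 1)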
@@ -1,19 +1,4 @@
$(document).ready(function () {
    function toggle() {
        if ($('input[name="application-fetch_backend"]:checked').val() != 'html_requests') {
            $('#requests-override-options').hide();
            $('#webdriver-override-options').show();
        } else {
            $('#requests-override-options').show();
            $('#webdriver-override-options').hide();
        }
    }

    $('input[name="application-fetch_backend"]').click(function (e) {
        toggle();
    });
    toggle();

    $("#api-key").hover(
        function () {
            $("#api-key-copy").html('copy').fadeIn();
@@ -24,14 +24,17 @@ $(document).ready(function() {
})

data = {
    window_url : window.location.href,
    notification_urls : $('.notification-urls').val(),
    notification_body: $('#notification_body').val(),
    notification_format: $('#notification_format').val(),
    notification_title: $('#notification_title').val(),
    notification_urls: $('.notification-urls').val(),
    window_url: window.location.href,
}
for (key in data) {
    if (!data[key].length) {
        alert(key+" is empty, cannot send test.")
        return;
    }

if (!data['notification_urls'].length) {
    alert("Notification URL list is empty, cannot send test.")
    return;
}

$.ajax({
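Note the behavioural change above: the old loop rejected the test payload if any field was empty, while the new check only requires the notification URL list, letting an empty title or body fall through to defaults. The two rules side by side, as a Python sketch:

def old_rule(data):
    return all(len(v) for v in data.values())    # every field had to be non-empty

def new_rule(data):
    return len(data['notification_urls']) > 0    # only the URL list is required

payload = {'notification_urls': 'mailto://example', 'notification_title': ''}
assert not old_rule(payload) and new_rule(payload)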
@@ -3,45 +3,50 @@
 * Toggles theme between light and dark mode.
 */
$(document).ready(function () {
const button = document.getElementById("toggle-light-mode");
    const button = document.getElementById("toggle-light-mode");

button.onclick = () => {
    const htmlElement = document.getElementsByTagName("html");
    const isDarkMode = htmlElement[0].dataset.darkmode === "true";
    htmlElement[0].dataset.darkmode = !isDarkMode;
    setCookieValue(!isDarkMode);
};
    button.onclick = () => {
        const htmlElement = document.getElementsByTagName("html");
        const isDarkMode = htmlElement[0].dataset.darkmode === "true";
        htmlElement[0].dataset.darkmode = !isDarkMode;
        setCookieValue(!isDarkMode);
    };

const setCookieValue = (value) => {
    document.cookie = `css_dark_mode=${value};max-age=31536000;path=/`
}
    const setCookieValue = (value) => {
        document.cookie = `css_dark_mode=${value};max-age=31536000;path=/`
    }

// Search input box behaviour
    // Search input box behaviour
    const toggle_search = document.getElementById("toggle-search");
    const search_q = document.getElementById("search-q");
window.addEventListener('keydown', function (e) {
    const search_q = document.getElementById("search-q");
    if(search_q) {
        window.addEventListener('keydown', function (e) {
            if (e.altKey == true && e.keyCode == 83) {
                search_q.classList.toggle('expanded');
                search_q.focus();
            }
        });

    if (e.altKey == true && e.keyCode == 83)
        search_q.classList.toggle('expanded');
    search_q.focus();
});


search_q.onkeydown = (e) => {
    var key = e.keyCode || e.which;
    if (key === 13) {
        document.searchForm.submit();
        search_q.onkeydown = (e) => {
            var key = e.keyCode || e.which;
            if (key === 13) {
                document.searchForm.submit();
            }
        };
        toggle_search.onclick = () => {
            // Could be that they want to search something once text is in there
            if (search_q.value.length) {
                document.searchForm.submit();
            } else {
                // If not..
                search_q.classList.toggle('expanded');
                search_q.focus();
            }
        };
    }
};
toggle_search.onclick = () => {
    // Could be that they want to search something once text is in there
    if (search_q.value.length) {
        document.searchForm.submit();
    } else {
        // If not..
        search_q.classList.toggle('expanded');
        search_q.focus();
    }
};

$('#heart-us').click(function () {
    $("#overlay").toggleClass('visible');
    heartpath.style.fill = document.getElementById("overlay").classList.contains("visible") ? '#ff0000' : 'var(--color-background)';
});
});
29
changedetectionio/static/js/vis.js
Normal file
@@ -0,0 +1,29 @@
$(document).ready(function () {

    // Lazy Hide/Show elements mechanism
    $('[data-visible-for]').hide();
    function show_related_elem(e) {
        var n = $(e).attr('name') + "=" + $(e).val();
        if (n === 'fetch_backend=system') {
            n = "fetch_backend=" + default_system_fetch_backend;
        }
        $(`[data-visible-for~="${n}"]`).show();
    }
    $(':radio').on('keyup keypress blur change click', function (e) {
        $(`[data-visible-for]`).hide();
        $('.advanced-options').hide();
        show_related_elem(this);
    });

    $(':radio:checked').each(function (e) {
        show_related_elem(this);
    })

    // Show advanced
    $('.show-advanced').click(function (e) {
        $(this).closest('.tab-pane-inner').find('.advanced-options').each(function (e) {
            $(this).toggle();
        })
    });
});
@@ -149,7 +149,7 @@ $(document).ready(function () {
// @todo In the future paint all that match
for (const c of current_default_xpath) {
    for (var i = selector_data['size_pos'].length; i !== 0; i--) {
        if (selector_data['size_pos'][i - 1].xpath === c) {
        if (selector_data['size_pos'][i - 1].xpath.trim() === c.trim()) {
            console.log("highlighting " + c);
            current_selected_i = i - 1;
            highlight_current_selected_i();
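Note: the one-line change above makes the saved-xpath match whitespace-tolerant. A Python illustration with a hypothetical stored value:

c = "//div[@id='price'] "            # stored filter with a stray trailing space
xpath = "//div[@id='price']"         # value reported by the scraper
assert xpath != c                    # the old exact comparison misses the match
assert xpath.strip() == c.strip()    # the new trimmed comparison highlights it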
@@ -4,6 +4,14 @@ $(function () {
    $(this).closest('.unviewed').removeClass('unviewed');
});

$('td[data-timestamp]').each(function () {
    $(this).prop('title', new Intl.DateTimeFormat(undefined,
        {
            dateStyle: 'full',
            timeStyle: 'long'
        }).format($(this).data('timestamp') * 1000));
})

$("#checkbox-assign-tag").click(function (e) {
    $('#op_extradata').val(prompt("Enter a tag name"));
});
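Note: the new block above converts each row's epoch-seconds `data-timestamp` into a long localized date for the tooltip (the `* 1000` is needed because JavaScript dates use milliseconds). The same conversion in Python, as a sketch; the exact output depends on the environment's locale:

from datetime import datetime

ts = 1700000000                       # example epoch seconds
local = datetime.fromtimestamp(ts)    # local-time equivalent of the browser conversion
print(local.strftime('%A, %B %d, %Y %H:%M:%S'))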
@@ -1,40 +1,4 @@
$(document).ready(function () {
    function toggle() {
        if ($('input[name="fetch_backend"]:checked').val() == 'html_webdriver') {
            if (playwright_enabled) {
                // playwright supports headers, so hide everything else
                // See #664
                $('#requests-override-options #request-method').hide();
                $('#requests-override-options #request-body').hide();

                // @todo connect this one up
                $('#ignore-status-codes-option').hide();
            } else {
                // selenium/webdriver doesn't support anything afaik, hide it all
                $('#requests-override-options').hide();
            }

            $('#webdriver-override-options').show();

        } else if ($('input[name="fetch_backend"]:checked').val() == 'system') {
            $('#requests-override-options #request-method').hide();
            $('#requests-override-options #request-body').hide();
            $('#ignore-status-codes-option').hide();
            $('#requests-override-options').hide();
            $('#webdriver-override-options').hide();
        } else {

            $('#requests-override-options').show();
            $('#requests-override-options *:hidden').show();
            $('#webdriver-override-options').hide();
        }
    }

    $('input[name="fetch_backend"]').click(function (e) {
        toggle();
    });
    toggle();

    $('#notification-setting-reset-to-default').click(function (e) {
        $('#notification_title').val('');
        $('#notification_body').val('');
@@ -187,6 +187,10 @@ ins {
  padding: 0.5em; }
#settings ins {
  padding: 0.5em; }
#settings option:checked {
  font-weight: bold; }
#settings [type=radio], #settings [type=checkbox] {
  vertical-align: middle; }

.source {
  position: absolute;
@@ -77,6 +77,13 @@ ins {
  ins {
    padding: 0.5em;
  }

  option:checked {
    font-weight: bold;
  }
  [type=radio],[type=checkbox] {
    vertical-align: middle;
  }
}

.source {
@@ -6,6 +6,10 @@
  }

  li {
    &.browser-step-with-error {
      background-color: #ffd6d6;
      border-radius: 4px;
    }
    &:not(:first-child) {
      &:hover {
        opacity: 1.0;
@@ -1,6 +1,6 @@

#toggle-light-mode {
  width: 3rem;
  /* width: 3rem;*/
  /* default */
  .icon-dark {
    display: none;
@@ -0,0 +1,24 @@
ul#requests-extra_browsers {
  list-style: none;
  /* tidy up the table to look more "inline" */
  li {
    > label {
      display: none;
    }
  }

  /* each proxy entry is a `table` */
  table {
    tr {
      display: inline;
    }
  }
}

#extra-browsers-setting {
  border: 1px solid var(--color-grey-800);
  border-radius: 4px;
  margin: 1em;
  padding: 1em;
}
@@ -60,3 +60,10 @@ body.proxy-check-active {

  padding-bottom: 1em;
}

#extra-proxies-setting {
  border: 1px solid var(--color-grey-800);
  border-radius: 4px;
  margin: 1em;
  padding: 1em;
}
38
changedetectionio/static/styles/scss/parts/_love.scss
Normal file
@@ -0,0 +1,38 @@
#overlay {

  opacity: 0.95;
  position: fixed;

  width: 350px;
  max-width: 100%;
  height: 100%;
  top: 0;
  right: -350px;
  background-color: var(--color-table-stripe);
  z-index: 2;

  transform: translateX(0);
  transition: transform .5s ease;

  &.visible {
    transform: translateX(-100%);
  }

  .content {
    font-size: 0.875rem;
    padding: 1rem;
    margin-top: 5rem;
    max-width: 400px;
    color: var(--color-watch-table-row-text);
  }
}

#heartpath {
  &:hover {
    fill: #ff0000 !important;
    transition: all ease 0.3s !important;
  }
  transition: all ease 0.3s !important;
}
25
changedetectionio/static/styles/scss/parts/_menu.scss
Normal file
@@ -0,0 +1,25 @@
.pure-menu-link {
  padding: 0.5rem 1em;
  line-height: 1.2rem;
}

.pure-menu-item {
  svg {
    height: 1.2rem;
  }
  * {
    vertical-align: middle;
  }
  .github-link {
    height: 1.8rem;
    display: block;
    svg {
      height: 100%;
    }
  }
  .bi-heart {
    &:hover {
      cursor: pointer;
    }
  }
}
@@ -0,0 +1,28 @@

#selector-wrapper {
  height: 100%;
  max-height: 70vh;
  overflow-y: scroll;
  position: relative;

  //width: 100%;
  >img {
    position: absolute;
    z-index: 4;
    max-width: 100%;
  }

  >canvas {
    position: relative;
    z-index: 5;
    max-width: 100%;

    &:hover {
      cursor: pointer;
    }
  }
}

#selector-current-xpath {
  font-size: 80%;
}
@@ -5,14 +5,18 @@
@import "parts/_arrows";
@import "parts/_browser-steps";
@import "parts/_extra_proxies";
@import "parts/_extra_browsers";
@import "parts/_pagination";
@import "parts/_spinners";
@import "parts/_variables";
@import "parts/_darkmode";
@import "parts/_menu";
@import "parts/_love";

body {
  color: var(--color-text);
  background: var(--color-background-page);
  font-family: Helvetica Neue, Helvetica, Lucida Grande, Arial, Ubuntu, Cantarell, Fira Sans, sans-serif;
}

.visually-hidden {
@@ -55,11 +59,6 @@ a.github-link {
  }
}


#toggle-search {
  width: 2rem;
}

#search-q {
  opacity: 0;
  -webkit-transition: all .9s ease;
@@ -403,8 +402,24 @@ label {
}

#watch-add-wrapper-zone {
  >div {
    display: inline-block;

  @media only screen and (min-width: 760px) {
    display: flex;
    gap: 0.3rem;
    flex-direction: row;
  }
  /* URL field grows always, other stay static in width */
  > span {
    flex-grow: 0;

    input {
      width: 100%;
      padding-right: 1em;
    }

    &:first-child {
      flex-grow: 1;
    }
  }

  @media only screen and (max-width: 760px) {
@@ -471,7 +486,11 @@ footer {
  padding: 10px;

  &#left-sticky {
    left: 0px;
    left: 0;
    position: fixed;
    border-top-right-radius: 5px;
    border-bottom-right-radius: 5px;
    box-shadow: 1px 1px 4px var(--color-shadow-jump);
  }

  &#right-sticky {
@@ -939,37 +958,10 @@ ul {
  }
}

#selector-wrapper {
  height: 100%;
  overflow-y: scroll;
  position: relative;
@import "parts/_visualselector";

  //width: 100%;
  >img {
    position: absolute;
    z-index: 4;
    max-width: 100%;
  }

  >canvas {
    position: relative;
    z-index: 5;
    max-width: 100%;

    &:hover {
      cursor: pointer;
    }
  }
}

#selector-current-xpath {
  font-size: 80%;
}

#webdriver-override-options {
  input[type="number"] {
  #webdriver_delay {
    width: 5em;
  }
}

#api-key {
@@ -1103,3 +1095,4 @@ ul {
  border-radius: 3px;
  white-space: nowrap;
}
@@ -26,6 +26,9 @@
#browser_steps li {
  list-style: decimal;
  padding: 5px; }
#browser_steps li.browser-step-with-error {
  background-color: #ffd6d6;
  border-radius: 4px; }
#browser_steps li:not(:first-child):hover {
  opacity: 1.0; }
#browser_steps li .control {
@@ -125,6 +128,27 @@ body.proxy-check-active #request .proxy-timing {
  border-radius: 4px;
  padding: 1em; }

#extra-proxies-setting {
  border: 1px solid var(--color-grey-800);
  border-radius: 4px;
  margin: 1em;
  padding: 1em; }

ul#requests-extra_browsers {
  list-style: none;
  /* tidy up the table to look more "inline" */
  /* each proxy entry is a `table` */ }
  ul#requests-extra_browsers li > label {
    display: none; }
  ul#requests-extra_browsers table tr {
    display: inline; }

#extra-browsers-setting {
  border: 1px solid var(--color-grey-800);
  border-radius: 4px;
  margin: 1em;
  padding: 1em; }

.pagination-page-info {
  color: #fff;
  font-size: 0.85rem;
@@ -328,7 +352,7 @@ html[data-darkmode="true"] {
    color: var(--color-watch-table-error); }

#toggle-light-mode {
  width: 3rem;
  /* width: 3rem;*/
  /* default */ }
  #toggle-light-mode .icon-dark {
    display: none; }
@@ -339,9 +363,56 @@ html[data-darkmode="true"] #toggle-light-mode .icon-light {
html[data-darkmode="true"] #toggle-light-mode .icon-dark {
  display: block; }

.pure-menu-link {
  padding: 0.5rem 1em;
  line-height: 1.2rem; }

.pure-menu-item svg {
  height: 1.2rem; }

.pure-menu-item * {
  vertical-align: middle; }

.pure-menu-item .github-link {
  height: 1.8rem;
  display: block; }
  .pure-menu-item .github-link svg {
    height: 100%; }

.pure-menu-item .bi-heart:hover {
  cursor: pointer; }

#overlay {
  opacity: 0.95;
  position: fixed;
  width: 350px;
  max-width: 100%;
  height: 100%;
  top: 0;
  right: -350px;
  background-color: var(--color-table-stripe);
  z-index: 2;
  transform: translateX(0);
  transition: transform .5s ease; }
  #overlay.visible {
    transform: translateX(-100%); }
  #overlay .content {
    font-size: 0.875rem;
    padding: 1rem;
    margin-top: 5rem;
    max-width: 400px;
    color: var(--color-watch-table-row-text); }

#heartpath {
  transition: all ease 0.3s !important; }
  #heartpath:hover {
    fill: #ff0000 !important;
    transition: all ease 0.3s !important; }

body {
  color: var(--color-text);
  background: var(--color-background-page); }
  background: var(--color-background-page);
  font-family: Helvetica Neue, Helvetica, Lucida Grande, Arial, Ubuntu, Cantarell, Fira Sans, sans-serif; }

.visually-hidden {
  clip: rect(0 0 0 0);
@@ -373,9 +444,6 @@ a.github-link {
  a.github-link:hover {
    color: var(--color-icon-github-hover); }

#toggle-search {
  width: 2rem; }

#search-q {
  opacity: 0;
  -webkit-transition: all .9s ease;
@@ -615,11 +683,23 @@ label:hover {
#new-watch-form legend {
  color: var(--color-text-legend);
  font-weight: bold; }
#new-watch-form #watch-add-wrapper-zone > div {
  display: inline-block; }
@media only screen and (max-width: 760px) {
  #new-watch-form #watch-add-wrapper-zone #url {
    width: 100%; } }
#new-watch-form #watch-add-wrapper-zone {
  /* URL field grows always, other stay static in width */ }
  @media only screen and (min-width: 760px) {
    #new-watch-form #watch-add-wrapper-zone {
      display: flex;
      gap: 0.3rem;
      flex-direction: row; } }
  #new-watch-form #watch-add-wrapper-zone > span {
    flex-grow: 0; }
    #new-watch-form #watch-add-wrapper-zone > span input {
      width: 100%;
      padding-right: 1em; }
    #new-watch-form #watch-add-wrapper-zone > span:first-child {
      flex-grow: 1; }
  @media only screen and (max-width: 760px) {
    #new-watch-form #watch-add-wrapper-zone #url {
      width: 100%; } }

#diff-col {
  padding-left: 40px; }
@@ -667,7 +747,11 @@ footer {
  background: var(--color-background);
  padding: 10px; }
  .sticky-tab#left-sticky {
    left: 0px; }
    left: 0;
    position: fixed;
    border-top-right-radius: 5px;
    border-bottom-right-radius: 5px;
    box-shadow: 1px 1px 4px var(--color-shadow-jump); }
  .sticky-tab#right-sticky {
    right: 0px; }
  .sticky-tab#hosted-sticky {
@@ -976,6 +1060,7 @@ ul {

#selector-wrapper {
  height: 100%;
  max-height: 70vh;
  overflow-y: scroll;
  position: relative; }
  #selector-wrapper > img {
@@ -992,7 +1077,7 @@ ul {
#selector-current-xpath {
  font-size: 80%; }

#webdriver-override-options input[type="number"] {
#webdriver_delay {
  width: 5em; }

#api-key:hover {
@@ -42,6 +42,7 @@ class ChangeDetectionStore:
        self.__data = App.model()
        self.datastore_path = datastore_path
        self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
        print(">>> Datastore path is ", self.json_store_path)
        self.needs_write = False
        self.start_time = time.time()
        self.stop_thread = False
@@ -95,6 +96,14 @@ class ChangeDetectionStore:
            self.add_watch(url='https://changedetection.io/CHANGELOG.txt',
                           tag='changedetection.io',
                           extras={'fetch_backend': 'html_requests'})

            updates_available = self.get_updates_available()
            self.__data['settings']['application']['schema_version'] = updates_available.pop()

        else:
            # Bump the update version by running updates
            self.run_updates()

        self.__data['version_tag'] = version_tag

        # Just to test that proxies.json, if it exists, doesn't throw a parsing error on startup
@@ -124,9 +133,6 @@ class ChangeDetectionStore:
            secret = secrets.token_hex(16)
            self.__data['settings']['application']['api_access_token'] = secret

        # Bump the update version by running updates
        self.run_updates()

        self.needs_write = True

        # Finally start the thread that will manage periodic data saves to JSON
@@ -228,7 +234,7 @@ class ChangeDetectionStore:

        # Probably there should be a dict...
        for watch in self.data['watching'].values():
            if watch['url'] == url:
            if watch['url'].lower() == url.lower():
                return True

        return False
@@ -238,12 +244,17 @@ class ChangeDetectionStore:
        import pathlib

        self.__data['watching'][uuid].update({
            'last_checked': 0,
            'browser_steps_last_error_step': None,
            'check_count': 0,
            'fetch_time': 0.0,
            'has_ldjson_price_data': None,
            'in_stock': None,
            'last_checked': 0,
            'last_error': False,
            'last_notification_error': False,
            'last_viewed': 0,
            'previous_md5': False,
            'previous_md5_before_filters': False,
            'track_ldjson_price_data': None,
        })
@@ -323,7 +334,8 @@ class ChangeDetectionStore:

        # Or if UUIDs given directly
        if tag_uuids:
            apply_extras['tags'] = list(set(apply_extras['tags'] + tag_uuids))
            for t in tag_uuids:
                apply_extras['tags'] = list(set(apply_extras['tags'] + [t.strip()]))

        # Make any uuids unique
        if apply_extras.get('tags'):
@@ -350,6 +362,8 @@ class ChangeDetectionStore:
        if write_to_disk_now:
            self.sync_to_json()

        print("added ", url)

        return new_uuid

    def visualselector_data_is_ready(self, watch_uuid):
|
||||
|
||||
return {}
|
||||
|
||||
@property
|
||||
def extra_browsers(self):
|
||||
res = []
|
||||
p = list(filter(
|
||||
lambda s: (s.get('browser_name') and s.get('browser_connection_url')),
|
||||
self.__data['settings']['requests'].get('extra_browsers', [])))
|
||||
if p:
|
||||
for i in p:
|
||||
res.append(("extra_browser_"+i['browser_name'], i['browser_name']))
|
||||
|
||||
return res
|
||||
|
||||
def tag_exists_by_name(self, tag_name):
|
||||
return any(v.get('title', '').lower() == tag_name.lower() for k, v in self.__data['settings']['application']['tags'].items())
|
||||
|
||||
# Run all updates
|
||||
# IMPORTANT - Each update could be run even when they have a new install and the schema is correct
|
||||
# So therefor - each `update_n` should be very careful about checking if it needs to actually run
|
||||
# Probably we should bump the current update schema version with each tag release version?
|
||||
def run_updates(self):
|
||||
def get_updates_available(self):
|
||||
import inspect
|
||||
import shutil
|
||||
|
||||
updates_available = []
|
||||
for i, o in inspect.getmembers(self, predicate=inspect.ismethod):
|
||||
m = re.search(r'update_(\d+)$', i)
|
||||
@@ -639,6 +659,15 @@ class ChangeDetectionStore:
|
||||
updates_available.append(int(m.group(1)))
|
||||
updates_available.sort()
|
||||
|
||||
return updates_available
|
||||
|
||||
# Run all updates
|
||||
# IMPORTANT - Each update could be run even when they have a new install and the schema is correct
|
||||
# So therefor - each `update_n` should be very careful about checking if it needs to actually run
|
||||
# Probably we should bump the current update schema version with each tag release version?
|
||||
def run_updates(self):
|
||||
import shutil
|
||||
updates_available = self.get_updates_available()
|
||||
for update_n in updates_available:
|
||||
if update_n > self.__data['settings']['application']['schema_version']:
|
||||
print ("Applying update_{}".format((update_n)))
|
||||
@@ -820,4 +849,14 @@ class ChangeDetectionStore:
|
||||
if not watch.get('date_created'):
|
||||
self.data['watching'][uuid]['date_created'] = i
|
||||
i+=1
|
||||
return
|
||||
return
|
||||
|
||||
# #1774 - protect xpath1 against migration
|
||||
def update_14(self):
|
||||
for awatch in self.__data["watching"]:
|
||||
if self.__data["watching"][awatch]['include_filters']:
|
||||
for num, selector in enumerate(self.__data["watching"][awatch]['include_filters']):
|
||||
if selector.startswith('/'):
|
||||
self.__data["watching"][awatch]['include_filters'][num] = 'xpath1:' + selector
|
||||
if selector.startswith('xpath:'):
|
||||
self.__data["watching"][awatch]['include_filters'][num] = selector.replace('xpath:', 'xpath1:', 1)
|
||||
|
||||
@@ -13,10 +13,10 @@
|
||||
<div class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Use <a target=_new href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.</li>
|
||||
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_discord">discord://</a></code> (or <code>https://discord.com/api/webhooks...</code>)) </code> only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
|
||||
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_discord">discord://</a></code> (or <code>https://discord.com/api/webhooks...</code>)) only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
|
||||
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> bots can't send messages to other bots, so you should specify chat ID of non-bot user.</li>
|
||||
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li>
|
||||
<li><code>gets://</code>, <code>posts://</code>, <code>puts://</code>, <code>deletes://</code> for direct API calls (or omit the "<code>s</code>" for non-SSL ie <code>get://</code>)</li>
|
||||
<li><code>gets://</code>, <code>posts://</code>, <code>puts://</code>, <code>deletes://</code> for direct API calls (or omit the "<code>s</code>" for non-SSL ie <code>get://</code>) <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes#postposts">more help here</a></li>
|
||||
<li>Accepts the <code>{{ '{{token}}' }}</code> placeholders listed below</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
@@ -39,6 +39,24 @@
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro render_nolabel_field(field) %}
|
||||
<span>
|
||||
{{ field(**kwargs)|safe }}
|
||||
{% if field.errors %}
|
||||
<span class="error">
|
||||
{% if field.errors %}
|
||||
<ul class=errors>
|
||||
{% for error in field.errors %}
|
||||
<li>{{ error }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
</span>
|
||||
{% endif %}
|
||||
</span>
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro render_button(field) %}
|
||||
{{ field(**kwargs)|safe }}
|
||||
{% endmacro %}
|
||||
@@ -8,10 +8,10 @@
|
||||
<title>Change Detection{{extra_title}}</title>
|
||||
<link rel="alternate" type="application/rss+xml" title="Changedetection.io » Feed{% if active_tag %}- {{active_tag}}{% endif %}" href="{{ url_for('rss', tag=active_tag , token=app_rss_token)}}" >
|
||||
<link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='pure-min.css')}}" >
|
||||
<link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='styles.css')}}" >
|
||||
<link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='styles.css')}}?v={{ get_css_version() }}" >
|
||||
{% if extra_stylesheets %}
|
||||
{% for m in extra_stylesheets %}
|
||||
<link rel="stylesheet" href="{{ m }}?ver=1000" >
|
||||
<link rel="stylesheet" href="{{ m }}?ver={{ get_css_version() }}" >
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
@@ -85,6 +85,7 @@
|
||||
<a href="{{url_for('logout')}}" class="pure-menu-link">LOG OUT</a>
|
||||
</li>
|
||||
{% endif %}
|
||||
{% if current_user.is_authenticated or not has_password %}
|
||||
<li class="pure-menu-item pure-form" id="search-menu-item">
|
||||
<!-- We use GET here so it offers people a chance to set bookmarks etc -->
|
||||
<form name="searchForm" action="" method="GET">
|
||||
@@ -95,6 +96,7 @@
|
||||
</button>
|
||||
</form>
|
||||
</li>
|
||||
{% endif %}
|
||||
<li class="pure-menu-item">
|
||||
<button class="toggle-button" id ="toggle-light-mode" type="button" title="Toggle Light/Dark Mode">
|
||||
<span class="visually-hidden">Toggle light/dark mode</span>
|
||||
@@ -106,6 +108,20 @@
|
||||
</span>
|
||||
</button>
|
||||
</li>
|
||||
<li class="pure-menu-item" id="heart-us">
|
||||
<svg
|
||||
fill="#ff0000"
|
||||
class="bi bi-heart"
|
||||
preserveAspectRatio="xMidYMid meet"
|
||||
viewBox="0 0 16.9 16.1"
|
||||
id="svg-heart"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<path id="heartpath" d="M 5.338316,0.50302766 C 0.71136983,0.50647126 -3.9576371,7.2707777 8.5004254,15.503028 23.833425,5.3700277 13.220206,-2.5384409 8.6762066,1.6475589 c -0.060791,0.054322 -0.11943,0.1110064 -0.1757812,0.1699219 -0.057,-0.059 -0.1157813,-0.116875 -0.1757812,-0.171875 C 7.4724566,0.86129334 6.4060729,0.50223298 5.338316,0.50302766 Z"
|
||||
style="fill:var(--color-background);fill-opacity:1;stroke:#ff0000;stroke-opacity:1" />
|
||||
</svg>
|
||||
|
||||
</li>
|
||||
<li class="pure-menu-item">
|
||||
<a class="github-link" href="https://github.com/dgtlmoon/changedetection.io">
|
||||
{% include "svgs/github.svg" %}
|
||||
@@ -121,14 +137,51 @@
|
||||
{% endif %}
|
||||
{% if left_sticky %}
|
||||
<div class="sticky-tab" id="left-sticky">
|
||||
<a href="{{url_for('preview_page', uuid=uuid)}}">Show current snapshot</a>
|
||||
<a href="{{url_for('preview_page', uuid=uuid)}}">Show current snapshot</a><br>
|
||||
Visualise <strong>triggers</strong> and <strong>ignored text</strong>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if right_sticky %}
|
||||
<div class="sticky-tab" id="right-sticky">{{ right_sticky }}</div>
|
||||
{% endif %}
|
||||
<section class="content">
|
||||
<header>
|
||||
<div id="overlay">
|
||||
<div class="content">
|
||||
<strong>changedetection.io needs your support!</strong><br>
|
||||
<p>
|
||||
You can help us by supporting changedetection.io on these platforms;
|
||||
</p>
|
||||
<p>
|
||||
<ul>
|
||||
<li>
|
||||
<a href="https://alternativeto.net/software/changedetection-io/about/">Rate us at
|
||||
AlternativeTo.net</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="https://github.com/dgtlmoon/changedetection.io">Star us on GitHub</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="https://twitter.com/change_det_io">Follow us at Twitter/X</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="https://www.linkedin.com/company/changedetection-io">Check us out on LinkedIn</a>
|
||||
</li>
|
||||
<li>
|
||||
And tell your friends and colleagues :)
|
||||
</li>
|
||||
</ul>
|
||||
<p>
|
||||
The more popular changedetection.io is, the more time we can dedicate to adding amazing features!
|
||||
</p>
|
||||
<p>
|
||||
Many thanks :)<br>
|
||||
</p>
|
||||
<p>
|
||||
<i>changedetection.io team</i>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<header>
|
||||
{% block header %}{% endblock %}
|
||||
</header>
|
||||
|
||||
|
||||
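Note: the rebuilt diff form below submits `from_version` and `to_version` as GET parameters instead of the single `previous_version`. A hypothetical sketch of how a handler might resolve them (the function name and fallback policy are assumptions; the server-side change is not shown in this diff):

def resolve_versions(args, versions):
    # versions: ascending list of snapshot timestamps
    to_version = args.get('to_version') or versions[-1]
    from_version = args.get('from_version') or (versions[-2] if len(versions) > 1 else versions[-1])
    if from_version not in versions:
        from_version = versions[0]
    if to_version not in versions:
        to_version = versions[-1]
    return from_version, to_version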
@@ -13,10 +13,31 @@
<script src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script>

<div id="settings">
    <h1>Differences</h1>
    <form class="pure-form " action="" method="GET">
        <fieldset>

            {% if versions|length >= 1 %}
            <strong>Compare</strong>
            <del class="change"><span>from</span></del>
            <select id="diff-version" name="from_version" class="needs-localtime">
                {% for version in versions|reverse %}
                <option value="{{ version }}" {% if version== from_version %} selected="" {% endif %}>
                    {{ version }}
                </option>
                {% endfor %}
            </select>
            <ins class="change"><span>to</span></ins>
            <select id="current-version" name="to_version" class="needs-localtime">
                {% for version in versions|reverse %}
                <option value="{{ version }}" {% if version== to_version %} selected="" {% endif %}>
                    {{ version }}
                </option>
                {% endfor %}
            </select>
            <button type="submit" class="pure-button pure-button-primary">Go</button>
            {% endif %}
        </fieldset>
        <fieldset>
            <strong>Style</strong>
            <label for="diffWords" class="pure-checkbox">
                <input type="radio" name="diff_type" id="diffWords" value="diffWords"> Words</label>
            <label for="diffLines" class="pure-checkbox">
@@ -26,32 +47,20 @@
                <input type="radio" name="diff_type" id="diffChars" value="diffChars"> Chars</label>
            <!-- @todo - when mimetype is JSON, select this by default? -->
            <label for="diffJson" class="pure-checkbox">
                <input type="radio" name="diff_type" id="diffJson" value="diffJson" > JSON</label>
                <input type="radio" name="diff_type" id="diffJson" value="diffJson"> JSON</label>

            {% if versions|length >= 1 %}
            <label for="diff-version">Compare newest (<span id="current-v-date"></span>) with</label>
            <select id="diff-version" name="previous_version">
                {% for version in versions|reverse %}
                <option value="{{version}}" {% if version== current_previous_version %} selected="" {% endif %}>
                    {{version}}
                </option>
                {% endfor %}
            </select>
            <button type="submit" class="pure-button pure-button-primary">Go</button>
            {% endif %}
        </fieldset>
    </form>
    <del>Removed text</del>
    <ins>Inserted Text</ins>
    <span>
            <span>
                <!-- https://github.com/kpdecker/jsdiff/issues/389 ? -->
                <label for="ignoreWhitespace" class="pure-checkbox" id="label-diff-ignorewhitespace">
                    <input type="checkbox" id="ignoreWhitespace" name="ignoreWhitespace" > Ignore Whitespace</label>
                    <input type="checkbox" id="ignoreWhitespace" name="ignoreWhitespace"> Ignore Whitespace</label>
            </span>
        </fieldset>
    </form>

</div>

<div id="diff-jump">
    <a onclick="next_diff();">Jump</a>
    <a id="jump-next-diff" title="Jump to next difference">Jump</a>
</div>

<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
@@ -79,8 +88,6 @@
</div>

<div class="tab-pane-inner" id="text">
    <div class="tip">Pro-tip: Use <strong>show current snapshot</strong> tab to visualise what will be ignored, highlight text to add to ignore filters</div>

    {% if password_enabled_and_share_is_off %}
    <div class="tip">Pro-tip: You can enable <strong>"share access when password is enabled"</strong> from settings</div>
    {% endif %}
@@ -91,8 +98,8 @@
        <tbody>
        <tr>
            <!-- just proof of concept copied straight from github.com/kpdecker/jsdiff -->
            <td id="a" style="display: none;">{{previous}}</td>
            <td id="b" style="display: none;">{{newest}}</td>
            <td id="a" style="display: none;">{{from_version_file_contents}}</td>
            <td id="b" style="display: none;">{{to_version_file_contents}}</td>
            <td id="diff-col">
                <span id="result" class="highlightable-filter"></span>
            </td>

@@ -3,21 +3,24 @@
{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
{% from '_common_fields.jinja' import render_common_settings_form %}
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
<script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script>
<script>

    const browser_steps_available_screenshots=JSON.parse('{{ watch.get_browsersteps_available_screenshots|tojson }}');
    const browser_steps_config=JSON.parse('{{ browser_steps_config|tojson }}');
    const browser_steps_fetch_screenshot_image_url="{{url_for('browser_steps.browser_steps_fetch_screenshot_image', uuid=uuid)}}";
    const browser_steps_last_error_step={{ watch.browser_steps_last_error_step|tojson }};
    const browser_steps_start_url="{{url_for('browser_steps.browsersteps_start_session', uuid=uuid)}}";
    const browser_steps_sync_url="{{url_for('browser_steps.browsersteps_ui_update', uuid=uuid)}}";
    {% if emailprefix %}
    const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
    {% endif %}
    const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
    const notification_base_url="{{url_for('ajax_callback_send_notification_test', watch_uuid=uuid)}}";
    const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};
    const recheck_proxy_start_url="{{url_for('check_proxies.start_check', uuid=uuid)}}";
    const proxy_recheck_status_url="{{url_for('check_proxies.get_recheck_status', uuid=uuid)}}";
    const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
    const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}";

    const default_system_fetch_backend="{{ settings_application['fetch_backend'] }}";
</script>

<script src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script>
@@ -49,6 +52,7 @@
    <li class="tab"><a href="#restock">Restock Detection</a></li>
    {% endif %}
    <li class="tab"><a href="#notifications">Notifications</a></li>
    <li class="tab"><a href="#stats">Stats</a></li>
</ul>
</div>

@@ -109,7 +113,7 @@
<span class="pure-form-message-inline">
    <p>Use the <strong>Basic</strong> method (default) where your watched site doesn't need Javascript to render.</p>
    <p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'. </p>
    Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using BrightData Proxies, find out more here.</a>
    Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using Bright Data and Oxylabs Proxies, find out more here.</a>
</span>
</div>
{% if form.proxy %}
@@ -121,10 +125,9 @@
    </span>
</div>
{% endif %}
<div class="pure-control-group inline-radio">
    {{ render_checkbox_field(form.ignore_status_codes) }}
</div>
<fieldset id="webdriver-override-options">

<!-- webdriver always -->
<fieldset data-visible-for="fetch_backend=html_webdriver" style="display: none;">
    <div class="pure-control-group">
        {{ render_field(form.webdriver_delay) }}
        <div class="pure-form-message-inline">
@@ -137,23 +140,40 @@
        </div>
    </div>
    <div class="pure-control-group">
        <a class="pure-button button-secondary button-xsmall show-advanced">Show advanced options</a>
    </div>
    <div class="advanced-options" style="display: none;">
        {{ render_field(form.webdriver_js_execute_code) }}
        <div class="pure-form-message-inline">
            Run this code before performing change detection, handy for filling in fields and other actions <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Run-JavaScript-before-change-detection">More help and examples here</a>
            Run this code before performing change detection, handy for filling in fields and other
            actions <a
                href="https://github.com/dgtlmoon/changedetection.io/wiki/Run-JavaScript-before-change-detection">More
            help and examples here</a>
        </div>
    </div>
</fieldset>
<fieldset class="pure-group" id="requests-override-options">
    {% if not playwright_enabled %}
    <div class="pure-form-message-inline">
        <strong>Request override is currently only used by the <i>Basic fast Plaintext/HTTP Client</i> method.</strong>
    </div>
    {% endif %}
    <div class="pure-control-group" id="request-method">
        {{ render_field(form.method) }}
<!-- html requests always -->
<fieldset data-visible-for="fetch_backend=html_requests">
    <div class="pure-control-group">
        <a class="pure-button button-secondary button-xsmall show-advanced">Show advanced options</a>
    </div>
    <div class="pure-control-group" id="request-headers">
        {{ render_field(form.headers, rows=5, placeholder="Example
    <div class="advanced-options" style="display: none;">
        <div class="pure-control-group" id="request-method">
            {{ render_field(form.method) }}
|
||||
</div>
|
||||
<div id="request-body">
|
||||
{{ render_field(form.body, rows=5, placeholder="Example
|
||||
{
|
||||
\"name\":\"John\",
|
||||
\"age\":30,
|
||||
\"car\":null
|
||||
}") }}
|
||||
</div>
|
||||
</div>
|
||||
</fieldset>
|
||||
<!-- hmm -->
|
||||
<div class="pure-control-group advanced-options" style="display: none;">
|
||||
{{ render_field(form.headers, rows=5, placeholder="Example
|
||||
Cookie: foobar
|
||||
User-Agent: wonderbra 1.0") }}
|
||||
|
||||
@@ -166,17 +186,12 @@ User-Agent: wonderbra 1.0") }}
|
||||
<br>
|
||||
(Not supported by Selenium browser)
|
||||
</div>
|
||||
|
||||
</div>
|
||||
<div class="pure-control-group" id="request-body">
|
||||
{{ render_field(form.body, rows=5, placeholder="Example
|
||||
{
|
||||
\"name\":\"John\",
|
||||
\"age\":30,
|
||||
\"car\":null
|
||||
}") }}
|
||||
<fieldset data-visible-for="fetch_backend=html_requests fetch_backend=html_webdriver" >
|
||||
<div class="pure-control-group inline-radio advanced-options" style="display: none;">
|
||||
{{ render_checkbox_field(form.ignore_status_codes) }}
|
||||
</div>
|
||||
</fieldset>
|
||||
</fieldset>
|
||||
</div>
|
||||
{% if playwright_enabled %}
|
||||
<div class="tab-pane-inner" id="browser-steps">
|
||||
@@ -287,11 +302,12 @@ xpath://body/div/span[contains(@class, 'example-class')]",
|
||||
{% endif %}
|
||||
</ul>
|
||||
</li>
|
||||
<li>XPath - Limit text to this XPath rule, simply start with a forward-slash,
|
||||
<li>XPath - Limit text to this XPath rule, simply start with a forward-slash. To specify XPath explicitly, or when the rule starts with an XPath function, prefix with <code>xpath:</code>
|
||||
<ul>
|
||||
<li>Example: <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
|
||||
<li>Example: <code>//*[contains(@class, 'sametext')]</code> or <code>xpath:count(//*[contains(@class, 'sametext')])</code>, <a
|
||||
href="http://xpather.com/" target="new">test your XPath here</a></li>
|
||||
<li>Example: Get all titles from an RSS feed <code>//title/text()</code></li>
|
||||
<li>To use XPath1.0: Prefix with <code>xpath1:</code></li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
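The distinction the help text draws matters because a rule that starts with an XPath function returns a scalar rather than a node set. A minimal sketch with lxml (an assumption here; the markup is illustrative) shows the difference:

```python
# A minimal sketch, assuming lxml is installed; the markup is illustrative.
from lxml import etree

html = etree.HTML("<div><p class='sametext'>a</p><p class='sametext'>b</p></div>")

# A node-selecting rule returns elements, whose text gets extracted
nodes = html.xpath("//*[contains(@class, 'sametext')]")
print([n.text for n in nodes])   # ['a', 'b']

# A rule built around an XPath function returns a scalar, not a node set,
# which is why it needs the explicit xpath: prefix in the filter field
count = html.xpath("count(//*[contains(@class, 'sametext')])")
print(count)                     # 2.0
```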
|
||||
@@ -441,7 +457,35 @@ Unavailable") }}
|
||||
</fieldset>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="tab-pane-inner" id="stats">
|
||||
<div class="pure-control-group">
|
||||
<style>
|
||||
#stats-table tr > td:first-child {
|
||||
font-weight: bold;
|
||||
}
|
||||
</style>
|
||||
<table class="pure-table" id="stats-table">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>Check count</td>
|
||||
<td>{{ "{:,}".format( watch.check_count) }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Consecutive filter failures</td>
|
||||
<td>{{ "{:,}".format( watch.consecutive_filter_failures) }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>History length</td>
|
||||
<td>{{ "{:,}".format(watch.history|length) }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Last fetch time</td>
|
||||
<td>{{ watch.fetch_time }}s</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
<div id="actions">
|
||||
<div class="pure-control-group">
|
||||
{{ render_button(form.save_button) }}
|
||||
|
||||
@@ -8,11 +8,12 @@
|
||||
<ul>
|
||||
<li class="tab" id=""><a href="#url-list">URL List</a></li>
|
||||
<li class="tab"><a href="#distill-io">Distill.io</a></li>
|
||||
<li class="tab"><a href="#xlsx">.XLSX & Wachete</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="box-wrap inner">
|
||||
<form class="pure-form pure-form-aligned" action="{{url_for('import_page')}}" method="POST">
|
||||
<form class="pure-form" action="{{url_for('import_page')}}" method="POST" enctype="multipart/form-data">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<div class="tab-pane-inner" id="url-list">
|
||||
<legend>
|
||||
@@ -79,6 +80,42 @@
|
||||
" rows="25">{{ original_distill_json }}</textarea>
|
||||
|
||||
</div>
|
||||
<div class="tab-pane-inner" id="xlsx">
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.xlsx_file, class="processor") }}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.file_mapping, class="processor") }}
|
||||
</div>
|
||||
</fieldset>
|
||||
<div class="pure-control-group">
|
||||
<span class="pure-form-message-inline">
|
||||
Table of custom column and data-type mappings for the <strong>Custom mapping</strong> file mapping type.
|
||||
</span>
|
||||
<table style="border: 1px solid #aaa; padding: 0.5rem; border-radius: 4px;">
|
||||
<tr>
|
||||
<td><strong>Column #</strong></td>
|
||||
{% for n in range(4) %}
|
||||
<td><input type="number" name="custom_xlsx[col_{{n}}]" style="width: 4rem;" min="1"></td>
|
||||
{% endfor %}
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>Type</strong></td>
|
||||
{% for n in range(4) %}
|
||||
<td><select name="custom_xlsx[col_type_{{n}}]">
|
||||
<option value="" style="color: #aaa"> -- none --</option>
|
||||
<option value="url">URL</option>
|
||||
<option value="title">Title</option>
|
||||
<option value="include_filter">CSS/xPath filter</option>
|
||||
<option value="tag">Group / Tag name(s)</option>
|
||||
<option value="interval_minutes">Recheck time (minutes)</option>
|
||||
</select></td>
|
||||
{% endfor %}
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
</div>
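For reference, the custom mapping table above serialises to field names like `custom_xlsx[col_N]` (1-based column numbers) and `custom_xlsx[col_type_N]`. A hedged sketch of the resulting multipart POST, assuming the requests library; the URL and spreadsheet path are illustrative:

```python
# A hedged sketch of the multipart POST this form produces.
import requests

data = {
    'file_mapping': 'custom',
    'custom_xlsx[col_0]': '1',           # column 1 of the sheet...
    'custom_xlsx[col_type_0]': 'url',    # ...holds the watch URL
    'custom_xlsx[col_1]': '2',
    'custom_xlsx[col_type_1]': 'title',
}
with open('spreadsheet.xlsx', 'rb') as f:
    files = {'xlsx_file': ('spreadsheet.xlsx', f)}
    res = requests.post('http://localhost:5000/import', data=data, files=files)
```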
|
||||
<button type="submit" class="pure-button pure-input-1-2 pure-button-primary">Import</button>
|
||||
</form>
|
||||
|
||||
|
||||
@@ -4,14 +4,14 @@
|
||||
{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
|
||||
{% from '_common_fields.jinja' import render_common_settings_form %}
|
||||
<script>
|
||||
const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
|
||||
const notification_base_url="{{url_for('ajax_callback_send_notification_test', watch_uuid=uuid)}}";
|
||||
{% if emailprefix %}
|
||||
const email_notification_prefix=JSON.parse('{{emailprefix|tojson}}');
|
||||
{% endif %}
|
||||
</script>
|
||||
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>
|
||||
|
||||
<script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='global-settings.js')}}" defer></script>
|
||||
<div class="edit-form">
|
||||
<div class="tabs collapsable">
|
||||
@@ -109,9 +109,9 @@
|
||||
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'. </p>
|
||||
</span>
|
||||
<br>
|
||||
Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using BrightData Proxies, find out more here.</a>
|
||||
Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using Bright Data and Oxylabs Proxies, find out more here.</a>
|
||||
</div>
|
||||
<fieldset class="pure-group" id="webdriver-override-options">
|
||||
<fieldset class="pure-group" id="webdriver-override-options" data-visible-for="application-fetch_backend=html_webdriver">
|
||||
<div class="pure-form-message-inline">
|
||||
<strong>If you're having trouble waiting for the page to be fully rendered (text missing etc), try increasing the 'wait' time here.</strong>
|
||||
<br>
|
||||
@@ -178,6 +178,9 @@ nav
|
||||
<span style="display:none;" id="api-key-copy" >copy</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
<a href="{{url_for('settings_reset_api_key')}}" class="pure-button button-small button-cancel">Regenerate API key</a>
|
||||
</div>
|
||||
</div>
|
||||
<div class="tab-pane-inner" id="proxies">
|
||||
<div id="recommended-proxy">
|
||||
@@ -227,9 +230,17 @@ nav
|
||||
</p>
|
||||
<p><strong>Tip</strong>: "Residential" and "Mobile" proxy type can be more successfull than "Data Center" for blocked websites.
|
||||
|
||||
<div class="pure-control-group">
|
||||
<div class="pure-control-group" id="extra-proxies-setting">
|
||||
{{ render_field(form.requests.form.extra_proxies) }}
|
||||
<span class="pure-form-message-inline">"Name" will be used for selecting the proxy in the Watch Edit settings</span>
|
||||
<span class="pure-form-message-inline">"Name" will be used for selecting the proxy in the Watch Edit settings</span><br>
|
||||
<span class="pure-form-message-inline">SOCKS5 proxies with authentication are only supported with 'plain requests' fetcher, for other fetchers you should whitelist the IP access instead</span>
|
||||
</div>
|
||||
<div class="pure-control-group" id="extra-browsers-setting">
|
||||
<p>
|
||||
<span class="pure-form-message-inline"><i>Extra Browsers</i> can be attached to further defeat CAPTCHA's on websites that are particularly hard to scrape.</span><br>
|
||||
<span class="pure-form-message-inline">Simply paste the connection address into the box, <a href="https://changedetection.io/tutorial/using-bright-datas-scraping-browser-pass-captchas-and-other-protection-when-monitoring">More instructions and examples here</a> </span>
|
||||
</p>
|
||||
{{ render_field(form.requests.form.extra_browsers) }}
|
||||
</div>
|
||||
</div>
|
||||
<div id="actions">
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
<svg class="octicon octicon-mark-github v-align-middle" height="32" viewbox="0 0 16 16" version="1.1" width="32" aria-hidden="true">
|
||||
<path fill-rule="evenodd" d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z"></path>
|
||||
<svg class="octicon octicon-mark-github v-align-middle" viewbox="0 0 16 16" version="1.1" aria-hidden="true">
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M 8,0 C 3.58,0 0,3.58 0,8 c 0,3.54 2.29,6.53 5.47,7.59 0.4,0.07 0.55,-0.17 0.55,-0.38 0,-0.19 -0.01,-0.82 -0.01,-1.49 C 4,14.09 3.48,13.23 3.32,12.78 3.23,12.55 2.84,11.84 2.5,11.65 2.22,11.5 1.82,11.13 2.49,11.12 3.12,11.11 3.57,11.7 3.72,11.94 4.44,13.15 5.59,12.81 6.05,12.6 6.12,12.08 6.33,11.73 6.56,11.53 4.78,11.33 2.92,10.64 2.92,7.58 2.92,6.71 3.23,5.99 3.74,5.43 3.66,5.23 3.38,4.41 3.82,3.31 c 0,0 0.67,-0.21 2.2,0.82 0.64,-0.18 1.32,-0.27 2,-0.27 0.68,0 1.36,0.09 2,0.27 1.53,-1.04 2.2,-0.82 2.2,-0.82 0.44,1.1 0.16,1.92 0.08,2.12 0.51,0.56 0.82,1.27 0.82,2.15 0,3.07 -1.87,3.75 -3.65,3.95 0.29,0.25 0.54,0.73 0.54,1.48 0,1.07 -0.01,1.93 -0.01,2.2 0,0.21 0.15,0.46 0.55,0.38 A 8.013,8.013 0 0 0 16,8 C 16,3.58 12.42,0 8,0 Z"
|
||||
id="path2" />
|
||||
</svg>
|
||||
|
||||
|
Before: 749 B → After: 917 B
@@ -1 +1 @@
|
||||
<?xml version="1.0" encoding="utf-8"?><svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 122.879 119.799" enable-background="new 0 0 122.879 119.799" xml:space="preserve"><g><path d="M49.988,0h0.016v0.007C63.803,0.011,76.298,5.608,85.34,14.652c9.027,9.031,14.619,21.515,14.628,35.303h0.007v0.033v0.04 h-0.007c-0.005,5.557-0.917,10.905-2.594,15.892c-0.281,0.837-0.575,1.641-0.877,2.409v0.007c-1.446,3.66-3.315,7.12-5.547,10.307 l29.082,26.139l0.018,0.016l0.157,0.146l0.011,0.011c1.642,1.563,2.536,3.656,2.649,5.78c0.11,2.1-0.543,4.248-1.979,5.971 l-0.011,0.016l-0.175,0.203l-0.035,0.035l-0.146,0.16l-0.016,0.021c-1.565,1.642-3.654,2.534-5.78,2.646 c-2.097,0.111-4.247-0.54-5.971-1.978l-0.015-0.011l-0.204-0.175l-0.029-0.024L78.761,90.865c-0.88,0.62-1.778,1.209-2.687,1.765 c-1.233,0.755-2.51,1.466-3.813,2.115c-6.699,3.342-14.269,5.222-22.272,5.222v0.007h-0.016v-0.007 c-13.799-0.004-26.296-5.601-35.338-14.645C5.605,76.291,0.016,63.805,0.007,50.021H0v-0.033v-0.016h0.007 c0.004-13.799,5.601-26.296,14.645-35.338C23.683,5.608,36.167,0.016,49.955,0.007V0H49.988L49.988,0z M50.004,11.21v0.007h-0.016 h-0.033V11.21c-10.686,0.007-20.372,4.35-27.384,11.359C15.56,29.578,11.213,39.274,11.21,49.973h0.007v0.016v0.033H11.21 c0.007,10.686,4.347,20.367,11.359,27.381c7.009,7.012,16.705,11.359,27.403,11.361v-0.007h0.016h0.033v0.007 c10.686-0.007,20.368-4.348,27.382-11.359c7.011-7.009,11.358-16.702,11.36-27.4h-0.006v-0.016v-0.033h0.006 c-0.006-10.686-4.35-20.372-11.358-27.384C70.396,15.56,60.703,11.213,50.004,11.21L50.004,11.21z"/></g></svg>
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 122.879 119.799" enable-background="new 0 0 122.879 119.799" xml:space="preserve"><g><path d="M49.988,0h0.016v0.007C63.803,0.011,76.298,5.608,85.34,14.652c9.027,9.031,14.619,21.515,14.628,35.303h0.007v0.033v0.04 h-0.007c-0.005,5.557-0.917,10.905-2.594,15.892c-0.281,0.837-0.575,1.641-0.877,2.409v0.007c-1.446,3.66-3.315,7.12-5.547,10.307 l29.082,26.139l0.018,0.016l0.157,0.146l0.011,0.011c1.642,1.563,2.536,3.656,2.649,5.78c0.11,2.1-0.543,4.248-1.979,5.971 l-0.011,0.016l-0.175,0.203l-0.035,0.035l-0.146,0.16l-0.016,0.021c-1.565,1.642-3.654,2.534-5.78,2.646 c-2.097,0.111-4.247-0.54-5.971-1.978l-0.015-0.011l-0.204-0.175l-0.029-0.024L78.761,90.865c-0.88,0.62-1.778,1.209-2.687,1.765 c-1.233,0.755-2.51,1.466-3.813,2.115c-6.699,3.342-14.269,5.222-22.272,5.222v0.007h-0.016v-0.007 c-13.799-0.004-26.296-5.601-35.338-14.645C5.605,76.291,0.016,63.805,0.007,50.021H0v-0.033v-0.016h0.007 c0.004-13.799,5.601-26.296,14.645-35.338C23.683,5.608,36.167,0.016,49.955,0.007V0H49.988L49.988,0z M50.004,11.21v0.007h-0.016 h-0.033V11.21c-10.686,0.007-20.372,4.35-27.384,11.359C15.56,29.578,11.213,39.274,11.21,49.973h0.007v0.016v0.033H11.21 c0.007,10.686,4.347,20.367,11.359,27.381c7.009,7.012,16.705,11.359,27.403,11.361v-0.007h0.016h0.033v0.007 c10.686-0.007,20.368-4.348,27.382-11.359c7.011-7.009,11.358-16.702,11.36-27.4h-0.006v-0.016v-0.033h0.006 c-0.006-10.686-4.35-20.372-11.358-27.384C70.396,15.56,60.703,11.213,50.004,11.21L50.004,11.21z"/></g></svg>
|
||||
|
Before: 1.6 KiB → After: 1.5 KiB
@@ -1,6 +1,6 @@
|
||||
{% extends 'base.html' %}
|
||||
{% block content %}
|
||||
{% from '_helpers.jinja' import render_simple_field, render_field %}
|
||||
{% from '_helpers.jinja' import render_simple_field, render_field, render_nolabel_field %}
|
||||
<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script>
|
||||
|
||||
@@ -11,17 +11,14 @@
|
||||
<fieldset>
|
||||
<legend>Add a new change detection watch</legend>
|
||||
<div id="watch-add-wrapper-zone">
|
||||
<div>
|
||||
{{ render_simple_field(form.url, placeholder="https://...", required=true) }}
|
||||
{{ render_simple_field(form.tags, value=tags[active_tag].title if active_tag else '', placeholder="watch label / tag") }}
|
||||
</div>
|
||||
<div>
|
||||
{{ render_simple_field(form.watch_submit_button, title="Watch this URL!" ) }}
|
||||
{{ render_simple_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }}
|
||||
</div>
|
||||
|
||||
{{ render_nolabel_field(form.url, placeholder="https://...", required=true) }}
|
||||
{{ render_nolabel_field(form.tags, value=tags[active_tag].title if active_tag else '', placeholder="watch label / tag") }}
|
||||
{{ render_nolabel_field(form.watch_submit_button, title="Watch this URL!" ) }}
|
||||
{{ render_nolabel_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }}
|
||||
</div>
|
||||
<div id="quick-watch-processor-type">
|
||||
{{ render_simple_field(form.processor, title="Edit first then Watch") }}
|
||||
{{ render_simple_field(form.processor) }}
|
||||
</div>
|
||||
|
||||
</fieldset>
|
||||
@@ -82,12 +79,15 @@
|
||||
</tr>
|
||||
{% endif %}
|
||||
{% for watch in (watches|sort(attribute=sort_attribute, reverse=sort_order == 'asc'))|pagination_slice(skip=pagination.skip) %}
|
||||
|
||||
{% set is_unviewed = watch.newest_history_key| int > watch.last_viewed and watch.history_n>=2 %}
|
||||
|
||||
<tr id="{{ watch.uuid }}"
|
||||
class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }} processor-{{ watch['processor'] }}
|
||||
{% if watch.last_error is defined and watch.last_error != False %}error{% endif %}
|
||||
{% if watch.last_notification_error is defined and watch.last_notification_error != False %}error{% endif %}
|
||||
{% if watch.paused is defined and watch.paused != False %}paused{% endif %}
|
||||
{% if watch.newest_history_key| int > watch.last_viewed and watch.history_n>=2 %}unviewed{% endif %}
|
||||
{% if is_unviewed %}unviewed{% endif %}
|
||||
{% if watch.uuid in queued_uuids %}queued{% endif %}">
|
||||
<td class="inline checkbox-uuid" ><input name="uuids" type="checkbox" value="{{ watch.uuid}} " > <span>{{ loop.index+pagination.skip }}</span></td>
|
||||
<td class="inline watch-controls">
|
||||
@@ -104,8 +104,9 @@
|
||||
|
||||
{% if watch.get_fetch_backend == "html_webdriver"
|
||||
or ( watch.get_fetch_backend == "system" and system_default_fetcher == 'html_webdriver' )
|
||||
or "extra_browser_" in watch.get_fetch_backend
|
||||
%}
|
||||
<img class="status-icon" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" title="Using a chrome browser" >
|
||||
<img class="status-icon" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" title="Using a Chrome browser" >
|
||||
{% endif %}
|
||||
|
||||
{%if watch.is_pdf %}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" title="Converting PDF to text" >{% endif %}
|
||||
@@ -140,7 +141,7 @@
|
||||
{% if watch['processor'] == 'restock_diff' %}
|
||||
<span class="restock-label {{'in-stock' if watch['in_stock'] else 'not-in-stock' }}" title="detecting restock conditions">
|
||||
<!-- maybe some object watch['processor'][restock_diff] or.. -->
|
||||
{% if watch['last_checked'] %}
|
||||
{% if watch['last_checked'] and watch['in_stock'] != None %}
|
||||
{% if watch['in_stock'] %} In stock {% else %} Not in stock {% endif %}
|
||||
{% else %}
|
||||
Not yet checked
|
||||
@@ -154,8 +155,8 @@
|
||||
{% endfor %}
|
||||
|
||||
</td>
|
||||
<td class="last-checked">{{watch|format_last_checked_time|safe}}</td>
|
||||
<td class="last-changed">{% if watch.history_n >=2 and watch.last_changed >0 %}
|
||||
<td class="last-checked" data-timestamp="{{ watch.last_checked }}">{{watch|format_last_checked_time|safe}}</td>
|
||||
<td class="last-changed" data-timestamp="{{ watch.last_changed }}">{% if watch.history_n >=2 and watch.last_changed >0 %}
|
||||
{{watch.last_changed|format_timestamp_timeago}}
|
||||
{% else %}
|
||||
Not yet
|
||||
@@ -166,7 +167,13 @@
|
||||
class="recheck pure-button pure-button-primary">{% if watch.uuid in queued_uuids %}Queued{% else %}Recheck{% endif %}</a>
|
||||
<a href="{{ url_for('edit_page', uuid=watch.uuid)}}" class="pure-button pure-button-primary">Edit</a>
|
||||
{% if watch.history_n >= 2 %}
|
||||
<a href="{{ url_for('diff_history_page', uuid=watch.uuid) }}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a>
|
||||
|
||||
{% if is_unviewed %}
|
||||
<a href="{{ url_for('diff_history_page', uuid=watch.uuid, from_version=watch.get_next_snapshot_key_to_last_viewed) }}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a>
|
||||
{% else %}
|
||||
<a href="{{ url_for('diff_history_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a>
|
||||
{% endif %}
|
||||
|
||||
{% else %}
|
||||
{% if watch.history_n == 1 or (watch.history_n ==0 and watch.error_text_ctime )%}
|
||||
<a href="{{ url_for('preview_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary">Preview</a>
|
||||
@@ -178,13 +185,18 @@
|
||||
</tbody>
|
||||
</table>
|
||||
<ul id="post-list-buttons">
|
||||
{% if errored_count %}
|
||||
<li>
|
||||
<a href="{{url_for('index', with_errors=1, tag=request.args.get('tag')) }}" class="pure-button button-tag button-error ">With errors ({{ errored_count }})</a>
|
||||
</li>
|
||||
{% endif %}
|
||||
{% if has_unviewed %}
|
||||
<li>
|
||||
<a href="{{url_for('mark_all_viewed', tag=request.args.get('tag')) }}" class="pure-button button-tag ">Mark all viewed</a>
|
||||
<a href="{{url_for('mark_all_viewed',with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Mark all viewed</a>
|
||||
</li>
|
||||
{% endif %}
|
||||
<li>
|
||||
<a href="{{ url_for('form_watch_checknow', tag=active_tag) }}" class="pure-button button-tag ">Recheck
|
||||
<a href="{{ url_for('form_watch_checknow', tag=active_tag, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Recheck
|
||||
all {% if active_tag%} in "{{tags[active_tag].title}}"{%endif%}</a>
|
||||
</li>
|
||||
<li>
|
||||
|
||||
@@ -13,22 +13,17 @@ global app
|
||||
|
||||
|
||||
def cleanup(datastore_path):
|
||||
import glob
|
||||
# Unlink test output files
|
||||
files = [
|
||||
'count.txt',
|
||||
'endpoint-content.txt'
|
||||
'headers.txt',
|
||||
'headers-testtag.txt',
|
||||
'notification.txt',
|
||||
'secret.txt',
|
||||
'url-watches.json',
|
||||
'output.txt',
|
||||
]
|
||||
for file in files:
|
||||
try:
|
||||
os.unlink("{}/{}".format(datastore_path, file))
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
for g in ["*.txt", "*.json", "*.pdf"]:
|
||||
files = glob.glob(os.path.join(datastore_path, g))
|
||||
for f in files:
|
||||
if 'proxies.json' in f:
|
||||
# Usually mounted by docker container during test time
|
||||
continue
|
||||
if os.path.isfile(f):
|
||||
os.unlink(f)
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def app(request):
|
||||
|
||||
changedetectionio/tests/custom_browser_url/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
|
||||
# placeholder
|
||||
@@ -0,0 +1,89 @@
|
||||
#!/usr/bin/python3
|
||||
import os
|
||||
|
||||
from flask import url_for
|
||||
from ..util import live_server_setup, wait_for_all_checks
|
||||
|
||||
def do_test(client, live_server, make_test_use_extra_browser=False):
|
||||
|
||||
# Grep for this string in the logs?
|
||||
test_url = f"https://changedetection.io/ci-test.html"
|
||||
custom_browser_name = 'custom browser URL'
|
||||
|
||||
# needs to be set and something like 'ws://127.0.0.1:3000?stealth=1&--disable-web-security=true'
|
||||
assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"
|
||||
|
||||
#####################
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={"application-empty_pages_are_a_change": "",
|
||||
"requests-time_between_check-minutes": 180,
|
||||
'application-fetch_backend': "html_webdriver",
|
||||
# browserless-custom-url is setup in .github/workflows/test-only.yml
|
||||
# the test script run_custom_browser_url_test.sh will look for 'custom-browser-search-string' in the container logs
|
||||
'requests-extra_browsers-0-browser_connection_url': 'ws://browserless-custom-url:3000?stealth=1&--disable-web-security=true&custom-browser-search-string=1',
|
||||
'requests-extra_browsers-0-browser_name': custom_browser_name
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
if make_test_use_extra_browser:
|
||||
|
||||
# So the name should appear in the edit page under "Request" > "Fetch Method"
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b'custom browser URL' in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
'fetch_backend': f"extra_browser_{custom_browser_name}",
|
||||
'webdriver_js_execute_code': ''
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Updated watch." in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Force recheck
|
||||
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
assert b'1 watches queued for rechecking.' in res.data
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b'cool it works' in res.data
|
||||
|
||||
|
||||
# Requires playwright to be installed
|
||||
def test_request_via_custom_browser_url(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
# We do this so we can grep the logs of the custom container and see if the request actually went through that container
|
||||
do_test(client, live_server, make_test_use_extra_browser=True)
|
||||
|
||||
|
||||
def test_request_not_via_custom_browser_url(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
# We do this so we can grep the logs of the custom container and see if the request actually went through that container
|
||||
do_test(client, live_server, make_test_use_extra_browser=False)
|
||||
@@ -28,8 +28,6 @@ def test_fetch_webdriver_content(client, live_server):
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
time.sleep(3)
|
||||
|
||||
wait_for_all_checks(client)
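A recurring theme in these test changes is replacing fixed `time.sleep()` calls with `wait_for_all_checks(client)`. The helper itself lives in `changedetectionio/tests/util.py` and is not shown in this diff; a hedged sketch of the polling idea, where the marker text, attempt count and delay are assumptions:

```python
# A hedged sketch of a polling helper like wait_for_all_checks.
import time

def wait_for_all_checks(client, attempts=60, delay=0.5):
    # Poll the index until no watch is still queued or being checked
    for _ in range(attempts):
        res = client.get('/')
        if b'Checking now' not in res.data:
            return
        time.sleep(delay)
    raise TimeoutError('Watches still being checked after polling window')
```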
|
||||
|
||||
|
||||
|
||||
changedetectionio/tests/import/spreadsheet.xlsx (new binary file)
@@ -2,12 +2,11 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from ..util import live_server_setup
|
||||
from ..util import live_server_setup, wait_for_all_checks
|
||||
|
||||
|
||||
def test_preferred_proxy(client, live_server):
|
||||
time.sleep(1)
|
||||
live_server_setup(live_server)
|
||||
time.sleep(1)
|
||||
url = "http://chosen.changedetection.io"
|
||||
|
||||
res = client.post(
|
||||
@@ -20,7 +19,7 @@ def test_preferred_proxy(client, live_server):
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
@@ -34,5 +33,5 @@ def test_preferred_proxy(client, live_server):
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
# Now the request should appear in the second-squid logs
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"socks5proxy": {
|
||||
"label": "socks5proxy",
|
||||
"url": "socks5://proxy_user123:proxy_pass123@socks5proxy:1080"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"socks5proxy": {
|
||||
"label": "socks5proxy",
|
||||
"url": "socks5://socks5proxy-noauth:1080"
|
||||
}
|
||||
}
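Both fixtures use standard SOCKS5 URLs, with and without credentials. A minimal sketch of how the 'plain requests' fetcher consumes such a URL, assuming requests with SOCKS support installed (`pip install requests[socks]`); host and credentials mirror the fixture:

```python
# A minimal sketch, assuming requests with SOCKS support installed.
import requests

proxy_url = "socks5://proxy_user123:proxy_pass123@socks5proxy:1080"
proxies = {"http": proxy_url, "https": proxy_url}

res = requests.get("https://changedetection.io/CHANGELOG.txt", proxies=proxies)
print(res.status_code)
```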
|
||||
changedetectionio/tests/proxy_socks5/test_socks5_proxy.py (new file, 63 lines)
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/python3
|
||||
import os
|
||||
import time
|
||||
from flask import url_for
|
||||
from changedetectionio.tests.util import live_server_setup, wait_for_all_checks
|
||||
|
||||
|
||||
def test_socks5(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
# Setup a proxy
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-ignore_whitespace": "y",
|
||||
"application-fetch_backend": "html_requests",
|
||||
# set in .github/workflows/test-only.yml
|
||||
"requests-extra_proxies-0-proxy_url": "socks5://proxy_user123:proxy_pass123@socks5proxy:1080",
|
||||
"requests-extra_proxies-0-proxy_name": "socks5proxy",
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
test_url = "https://changedetection.io/CHANGELOG.txt?socks-test-tag=" + os.getenv('SOCKSTEST', '')
|
||||
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Watch added in Paused state, saving will unpause" in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first", unpause_on_save=1),
|
||||
)
|
||||
# check the proxy is offered as expected
|
||||
assert b'ui-0socks5proxy' in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first", unpause_on_save=1),
|
||||
data={
|
||||
"include_filters": "",
|
||||
"fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
|
||||
"headers": "",
|
||||
"proxy": "ui-0socks5proxy",
|
||||
"tags": "",
|
||||
"url": test_url,
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"unpaused" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Should see the proper string
|
||||
assert "+0200:".encode('utf-8') in res.data
|
||||
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/python3
|
||||
import os
|
||||
import time
|
||||
from flask import url_for
|
||||
from changedetectionio.tests.util import live_server_setup, wait_for_all_checks
|
||||
|
||||
|
||||
# should be proxies.json mounted from run_proxy_tests.sh already
|
||||
# -v `pwd`/tests/proxy_socks5/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json
|
||||
def test_socks5_from_proxiesjson_file(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
test_url = "https://changedetection.io/CHANGELOG.txt?socks-test-tag=" + os.getenv('SOCKSTEST', '')
|
||||
|
||||
res = client.get(url_for("settings_page"))
|
||||
assert b'name="requests-proxy" type="radio" value="socks5proxy"' in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Watch added in Paused state, saving will unpause" in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first", unpause_on_save=1),
|
||||
)
|
||||
# check the proxy is offered as expected
|
||||
assert b'name="proxy" type="radio" value="socks5proxy"' in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first", unpause_on_save=1),
|
||||
data={
|
||||
"include_filters": "",
|
||||
"fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
|
||||
"headers": "",
|
||||
"proxy": "socks5proxy",
|
||||
"tags": "",
|
||||
"url": test_url,
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"unpaused" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Should see the proper string
|
||||
assert "+0200:".encode('utf-8') in res.data
|
||||
changedetectionio/tests/test2.pdf (new binary file)
@@ -1,4 +1,4 @@
|
||||
from . util import live_server_setup, extract_UUID_from_client
|
||||
from .util import live_server_setup, extract_UUID_from_client, wait_for_all_checks
|
||||
from flask import url_for
|
||||
import time
|
||||
|
||||
@@ -19,10 +19,16 @@ def test_check_access_control(app, client, live_server):
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
time.sleep(2)
|
||||
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(3)
|
||||
# causes a 'Popped wrong request context.' error when client is accessed?
|
||||
#wait_for_all_checks(client)
|
||||
|
||||
res = c.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
assert b'1 watches queued for rechecking.' in res.data
|
||||
time.sleep(2)
|
||||
time.sleep(3)
|
||||
# causes a 'Popped wrong request context.' error when client is accessed?
|
||||
#wait_for_all_checks(client)
|
||||
|
||||
|
||||
# Enable password check and diff page access bypass
|
||||
res = c.post(
|
||||
@@ -42,7 +48,7 @@ def test_check_access_control(app, client, live_server):
|
||||
assert b"Login" in res.data
|
||||
|
||||
# The diff page should return something valid when logged out
|
||||
res = client.get(url_for("diff_history_page", uuid="first"))
|
||||
res = c.get(url_for("diff_history_page", uuid="first"))
|
||||
assert b'Random content' in res.data
|
||||
|
||||
# Check wrong password does not let us in
|
||||
@@ -83,6 +89,8 @@ def test_check_access_control(app, client, live_server):
|
||||
res = c.get(url_for("logout"),
|
||||
follow_redirects=True)
|
||||
|
||||
assert b"Login" in res.data
|
||||
|
||||
res = c.get(url_for("settings_page"),
|
||||
follow_redirects=True)
|
||||
|
||||
@@ -160,5 +168,5 @@ def test_check_access_control(app, client, live_server):
|
||||
assert b"Login" in res.data
|
||||
|
||||
# The diff page should return something valid when logged out
|
||||
res = client.get(url_for("diff_history_page", uuid="first"))
|
||||
res = c.get(url_for("diff_history_page", uuid="first"))
|
||||
assert b'Random content' not in res.data
|
||||
|
||||
@@ -96,7 +96,9 @@ def test_api_simple(client, live_server):
|
||||
)
|
||||
assert watch_uuid in res.json.keys()
|
||||
before_recheck_info = res.json[watch_uuid]
|
||||
|
||||
assert before_recheck_info['last_checked'] != 0
|
||||
|
||||
#705 `last_changed` should be zero on the first check
|
||||
assert before_recheck_info['last_changed'] == 0
|
||||
assert before_recheck_info['title'] == 'My test URL'
|
||||
@@ -157,6 +159,18 @@ def test_api_simple(client, live_server):
|
||||
# @todo how to handle None/default global values?
|
||||
assert watch['history_n'] == 2, "Found replacement history section, which is in its own API"
|
||||
|
||||
assert watch.get('viewed') == False
|
||||
# Loading the most recent snapshot should force viewed to become true
|
||||
client.get(url_for("diff_history_page", uuid="first"), follow_redirects=True)
|
||||
|
||||
# Fetch the whole watch again, viewed should be true
|
||||
res = client.get(
|
||||
url_for("watch", uuid=watch_uuid),
|
||||
headers={'x-api-key': api_key}
|
||||
)
|
||||
watch = res.json
|
||||
assert watch.get('viewed') == True
|
||||
|
||||
# basic systeminfo check
|
||||
res = client.get(
|
||||
url_for("systeminfo"),
|
||||
@@ -343,3 +357,25 @@ def test_api_watch_PUT_update(client, live_server):
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
|
||||
def test_api_import(client, live_server):
|
||||
api_key = extract_api_key_from_UI(client)
|
||||
|
||||
res = client.post(
|
||||
url_for("import") + "?tag=import-test",
|
||||
data='https://website1.com\r\nhttps://website2.com',
|
||||
headers={'x-api-key': api_key},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert res.status_code == 200
|
||||
assert len(res.json) == 2
|
||||
res = client.get(url_for("index"))
|
||||
assert b"https://website1.com" in res.data
|
||||
assert b"https://website2.com" in res.data
|
||||
|
||||
# Should see the new tag in the tag/groups list
|
||||
res = client.get(url_for('tags.tags_overview_page'))
|
||||
assert b'import-test' in res.data
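For reference, the same import endpoint can be driven from outside the test client. The header and body shape below come from the test above, while the base URL and the `/api/v1/import` path are assumptions:

```python
# A hedged sketch of driving the import endpoint externally.
import requests

res = requests.post(
    "http://localhost:5000/api/v1/import?tag=import-test",
    headers={"x-api-key": "YOUR_API_KEY"},
    data="https://website1.com\r\nhttps://website2.com",
)
assert res.status_code == 200
print(res.json())  # two entries, one per imported URL
```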
|
||||
|
||||
@@ -89,7 +89,7 @@ def test_check_basic_change_detection_functionality(client, live_server):
|
||||
|
||||
# Following the 'diff' link, it should no longer display as 'unviewed' even after we recheck it a few times
|
||||
res = client.get(url_for("diff_history_page", uuid="first"))
|
||||
assert b'Compare newest' in res.data
|
||||
assert b'selected=""' in res.data, "Confirm diff history page loaded"
|
||||
|
||||
# Check the [preview] pulls the right one
|
||||
res = client.get(
|
||||
|
||||
@@ -24,7 +24,7 @@ def test_check_extract_text_from_diff(client, live_server):
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
time.sleep(1)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Load in 5 different numbers/changes
|
||||
last_date=""
|
||||
|
||||
@@ -202,3 +202,32 @@ def test_check_filter_and_regex_extract(client, live_server):
|
||||
|
||||
# Should not be here
|
||||
assert b'Some text that did change' not in res.data
|
||||
|
||||
|
||||
|
||||
def test_regex_error_handling(client, live_server):
|
||||
|
||||
#live_server_setup(live_server)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
### test regex error handling
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"extract_text": '/something bad\d{3/XYZ',
|
||||
"url": test_url,
|
||||
"fetch_backend": "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b'is not a valid regular expression.' in res.data
|
||||
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
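The pattern `/something bad\d{3/XYZ` is rejected, though it is ambiguous whether the real validator trips on the unbalanced `{3` or on the bogus `XYZ` flags (Python's `re` treats a stray `{` as a literal, so in this sketch it is the flags check that catches it). A hedged sketch that checks both:

```python
# A hedged sketch of the validation the error message implies; the
# /pattern/flags convention mirrors the extract_text field.
import re

KNOWN_FLAGS = {'i': re.IGNORECASE, 's': re.DOTALL, 'm': re.MULTILINE}

def is_valid_regex_rule(rule):
    if not (rule.startswith('/') and rule.count('/') >= 2):
        return True  # plain-text rule, nothing to compile
    pattern, _, flag_chars = rule[1:].rpartition('/')
    if any(c not in KNOWN_FLAGS for c in flag_chars):
        return False  # e.g. the XYZ suffix in the test's rule
    try:
        re.compile(pattern)
        return True
    except re.error:
        return False

print(is_valid_regex_rule(r'/something bad\d{3/XYZ'))  # False
```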
|
||||
|
||||
@@ -33,8 +33,6 @@ def test_strip_regex_text_func():
|
||||
"/not"
|
||||
]
|
||||
|
||||
|
||||
fetcher = fetch_site_status.perform_site_check(datastore=False)
|
||||
stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines)
|
||||
|
||||
assert b"but 1 lines" in stripped_content
|
||||
|
||||
@@ -24,7 +24,6 @@ def test_strip_text_func():
|
||||
|
||||
ignore_lines = ["sometimes"]
|
||||
|
||||
fetcher = fetch_site_status.perform_site_check(datastore=False)
|
||||
stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines)
|
||||
|
||||
assert b"sometimes" not in stripped_content
|
||||
|
||||
@@ -1,16 +1,19 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import io
|
||||
import os
|
||||
import time
|
||||
|
||||
from flask import url_for
|
||||
|
||||
from .util import live_server_setup
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
|
||||
|
||||
def test_setup(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
def test_import(client, live_server):
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
@@ -119,3 +122,97 @@ def test_import_distillio(client, live_server):
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
# Clear flask alerts
|
||||
res = client.get(url_for("index"))
|
||||
|
||||
def test_import_custom_xlsx(client, live_server):
|
||||
"""Test can upload a excel spreadsheet and the watches are created correctly"""
|
||||
|
||||
#live_server_setup(live_server)
|
||||
|
||||
dirname = os.path.dirname(__file__)
|
||||
filename = os.path.join(dirname, 'import/spreadsheet.xlsx')
|
||||
with open(filename, 'rb') as f:
|
||||
|
||||
data= {
|
||||
'file_mapping': 'custom',
|
||||
'custom_xlsx[col_0]': '1',
|
||||
'custom_xlsx[col_1]': '3',
|
||||
'custom_xlsx[col_2]': '5',
|
||||
'custom_xlsx[col_3]': '4',
|
||||
'custom_xlsx[col_type_0]': 'title',
|
||||
'custom_xlsx[col_type_1]': 'url',
|
||||
'custom_xlsx[col_type_2]': 'include_filters',
|
||||
'custom_xlsx[col_type_3]': 'interval_minutes',
|
||||
'xlsx_file': (io.BytesIO(f.read()), 'spreadsheet.xlsx')
|
||||
}
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data=data,
|
||||
follow_redirects=True,
|
||||
)
|
||||
|
||||
assert b'4 imported from custom .xlsx' in res.data
|
||||
# Because this row was actually just a header with no usable URL, we should get an error
|
||||
assert b'Error processing row number 1' in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("index")
|
||||
)
|
||||
|
||||
assert b'Somesite results ABC' in res.data
|
||||
assert b'City news results' in res.data
|
||||
|
||||
# Just find one to check over
|
||||
for uuid, watch in live_server.app.config['DATASTORE'].data['watching'].items():
|
||||
if watch.get('title') == 'Somesite results ABC':
|
||||
filters = watch.get('include_filters')
|
||||
assert filters[0] == '/html[1]/body[1]/div[4]/div[1]/div[1]/div[1]||//*[@id=\'content\']/div[3]/div[1]/div[1]||//*[@id=\'content\']/div[1]'
|
||||
assert watch.get('time_between_check') == {'weeks': 0, 'days': 1, 'hours': 6, 'minutes': 24, 'seconds': 0}
|
||||
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
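The asserted `time_between_check` value decomposes the spreadsheet's `interval_minutes` column: 1824 minutes is exactly 1 day, 6 hours and 24 minutes. A minimal sketch of that expansion; the function name is illustrative:

```python
# A minimal sketch of the interval_minutes -> time_between_check expansion.
def minutes_to_time_between_check(minutes):
    weeks, rem = divmod(minutes, 7 * 24 * 60)
    days, rem = divmod(rem, 24 * 60)
    hours, mins = divmod(rem, 60)
    return {'weeks': weeks, 'days': days, 'hours': hours, 'minutes': mins, 'seconds': 0}

print(minutes_to_time_between_check(1824))
# {'weeks': 0, 'days': 1, 'hours': 6, 'minutes': 24, 'seconds': 0}
```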
|
||||
|
||||
def test_import_watchete_xlsx(client, live_server):
|
||||
"""Test can upload a excel spreadsheet and the watches are created correctly"""
|
||||
|
||||
#live_server_setup(live_server)
|
||||
dirname = os.path.dirname(__file__)
|
||||
filename = os.path.join(dirname, 'import/spreadsheet.xlsx')
|
||||
with open(filename, 'rb') as f:
|
||||
|
||||
data= {
|
||||
'file_mapping': 'wachete',
|
||||
'xlsx_file': (io.BytesIO(f.read()), 'spreadsheet.xlsx')
|
||||
}
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data=data,
|
||||
follow_redirects=True,
|
||||
)
|
||||
|
||||
assert b'4 imported from Wachete .xlsx' in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("index")
|
||||
)
|
||||
|
||||
assert b'Somesite results ABC' in res.data
|
||||
assert b'City news results' in res.data
|
||||
|
||||
# Just find one to check over
|
||||
for uuid, watch in live_server.app.config['DATASTORE'].data['watching'].items():
|
||||
if watch.get('title') == 'Somesite results ABC':
|
||||
filters = watch.get('include_filters')
|
||||
assert filters[0] == '/html[1]/body[1]/div[4]/div[1]/div[1]/div[1]||//*[@id=\'content\']/div[3]/div[1]/div[1]||//*[@id=\'content\']/div[1]'
|
||||
assert watch.get('time_between_check') == {'weeks': 0, 'days': 1, 'hours': 6, 'minutes': 24, 'seconds': 0}
|
||||
assert watch.get('fetch_backend') == 'html_requests' # Has inactive 'dynamic wachet'
|
||||
|
||||
if watch.get('title') == 'JS website':
|
||||
assert watch.get('fetch_backend') == 'html_webdriver' # Has active 'dynamic wachet'
|
||||
|
||||
if watch.get('title') == 'system default website':
|
||||
assert watch.get('fetch_backend') == 'system' # uses default if blank
|
||||
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
@@ -281,7 +281,8 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
|
||||
|
||||
# CUSTOM JSON BODY CHECK for POST://
|
||||
set_original_response()
|
||||
test_notification_url = url_for('test_notification_endpoint', _external=True).replace('http://', 'post://')+"?xxx={{ watch_url }}"
|
||||
# https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#header-manipulation
|
||||
test_notification_url = url_for('test_notification_endpoint', _external=True).replace('http://', 'post://')+"?xxx={{ watch_url }}&+custom-header=123"
|
||||
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
@@ -297,10 +298,7 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b'Settings updated' in res.data
|
||||
client.get(
|
||||
url_for("form_delete", uuid="all"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Add a watch and trigger a HTTP POST
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
@@ -315,7 +313,9 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
|
||||
set_modified_response()
|
||||
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
time.sleep(2) # plus extra delay for notifications to fire
|
||||
|
||||
with open("test-datastore/notification.txt", 'r') as f:
|
||||
x = f.read()
|
||||
@@ -328,6 +328,13 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
|
||||
with open("test-datastore/notification-url.txt", 'r') as f:
|
||||
notification_url = f.read()
|
||||
assert 'xxx=http' in notification_url
|
||||
# apprise style headers should be stripped
|
||||
assert 'custom-header' not in notification_url
|
||||
|
||||
with open("test-datastore/notification-headers.txt", 'r') as f:
|
||||
notification_headers = f.read()
|
||||
assert 'custom-header: 123' in notification_headers.lower()
|
||||
|
||||
|
||||
# Should always be automatically detected as JSON content type even when we set it as 'Text' (default)
|
||||
assert os.path.isfile("test-datastore/notification-content-type.txt")
|
||||
@@ -335,3 +342,8 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
|
||||
assert 'application/json' in f.read()
|
||||
|
||||
os.unlink("test-datastore/notification-url.txt")
|
||||
|
||||
client.get(
|
||||
url_for("form_delete", uuid="all"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -2,9 +2,8 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import set_original_response, set_modified_response, live_server_setup
|
||||
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks
|
||||
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
# `subtractive_selectors` should still work in `source:` type requests
|
||||
def test_fetch_pdf(client, live_server):
|
||||
@@ -22,7 +21,9 @@ def test_fetch_pdf(client, live_server):
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
@@ -33,8 +34,42 @@ def test_fetch_pdf(client, live_server):
|
||||
|
||||
# So we know if the file changes in other ways
|
||||
import hashlib
|
||||
md5 = hashlib.md5(open("test-datastore/endpoint-test.pdf", 'rb').read()).hexdigest().upper()
|
||||
original_md5 = hashlib.md5(open("test-datastore/endpoint-test.pdf", 'rb').read()).hexdigest().upper()
|
||||
# We should have one
|
||||
assert len(md5) >0
|
||||
assert len(original_md5) >0
|
||||
# And it's going to be in the document
|
||||
assert b'Document checksum - '+bytes(str(md5).encode('utf-8')) in res.data
|
||||
assert b'Document checksum - '+bytes(str(original_md5).encode('utf-8')) in res.data
|
||||
|
||||
|
||||
shutil.copy("tests/test2.pdf", "test-datastore/endpoint-test.pdf")
|
||||
changed_md5 = hashlib.md5(open("test-datastore/endpoint-test.pdf", 'rb').read()).hexdigest().upper()
|
||||
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
assert b'1 watches queued for rechecking.' in res.data
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Now something should be ready, indicated by having an 'unviewed' class
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
# The original checksum should not be here anymore (cdio adds it to the bottom of the text)
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert original_md5.encode('utf-8') not in res.data
|
||||
assert changed_md5.encode('utf-8') in res.data
|
||||
|
||||
|
||||
res = client.get(
|
||||
url_for("diff_history_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert original_md5.encode('utf-8') in res.data
|
||||
assert changed_md5.encode('utf-8') in res.data
|
||||
|
||||
assert b'here is a change' in res.data
|
||||
|
||||
@@ -80,8 +80,11 @@ def test_headers_in_request(client, live_server):
|
||||
|
||||
# Should be only one with headers set
|
||||
assert watches_with_headers==1
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
def test_body_in_request(client, live_server):
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_body', _external=True)
|
||||
if os.getenv('PLAYWRIGHT_DRIVER_URL'):
|
||||
@@ -170,7 +173,8 @@ def test_body_in_request(client, live_server):
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Body must be empty when Request Method is set to GET" in res.data
|
||||
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
def test_method_in_request(client, live_server):
|
||||
# Add our URL to the import page
|
||||
|
||||
@@ -2,12 +2,61 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI
|
||||
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, \
|
||||
extract_UUID_from_client
|
||||
|
||||
|
||||
def set_original_cdata_xml():
|
||||
test_return_data = """<rss xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:media="http://search.yahoo.com/mrss/" xmlns:atom="http://www.w3.org/2005/Atom" version="2.0">
|
||||
<channel>
|
||||
<title>Gizi</title>
|
||||
<link>https://test.com</link>
|
||||
<atom:link href="https://testsite.com" rel="self" type="application/rss+xml"/>
|
||||
<description>
|
||||
<![CDATA[ The Future Could Be Here ]]>
|
||||
</description>
|
||||
<language>en</language>
|
||||
<item>
|
||||
<title>
|
||||
<![CDATA[ <img src="https://testsite.com/hacked.jpg"> Hackers can access your computer ]]>
|
||||
</title>
|
||||
<link>https://testsite.com/news/12341234234</link>
|
||||
<description>
|
||||
<![CDATA[ <img class="type:primaryImage" src="https://testsite.com/701c981da04869e.jpg"/><p>The days of Terminator and The Matrix could be closer. But be positive.</p><p><a href="https://testsite.com">Read more link...</a></p> ]]>
|
||||
</description>
|
||||
<category>cybernetics</category>
|
||||
<category>rand corporation</category>
|
||||
<pubDate>Tue, 17 Oct 2023 15:10:00 GMT</pubDate>
|
||||
<guid isPermaLink="false">1850933241</guid>
|
||||
<dc:creator>
|
||||
<![CDATA[ Mr Hacker News ]]>
|
||||
</dc:creator>
|
||||
<media:thumbnail url="https://testsite.com/thumbnail-c224e10d81488e818701c981da04869e.jpg"/>
|
||||
</item>
|
||||
|
||||
<item>
|
||||
<title> Some other title </title>
|
||||
<link>https://testsite.com/news/12341234236</link>
|
||||
<description>
|
||||
Some other description
|
||||
</description>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
|
||||
def test_setup(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
def test_rss_and_token(client, live_server):
|
||||
# live_server_setup(live_server)
|
||||
|
||||
set_original_response()
|
||||
live_server_setup(live_server)
|
||||
rss_token = extract_rss_token_from_UI(client)
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
@@ -17,11 +66,11 @@ def test_rss_and_token(client, live_server):
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
rss_token = extract_rss_token_from_UI(client)
|
||||
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
set_modified_response()
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.get(
|
||||
@@ -37,3 +86,80 @@ def test_rss_and_token(client, live_server):
|
||||
)
|
||||
assert b"Access denied, bad token" not in res.data
|
||||
assert b"Random content" in res.data
|
||||
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
|
||||
def test_basic_cdata_rss_markup(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
|
||||
set_original_cdata_xml()
|
||||
|
||||
test_url = url_for('test_endpoint', content_type="application/xml", _external=True)
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b'CDATA' not in res.data
|
||||
assert b'<![' not in res.data
|
||||
assert b'Hackers can access your computer' in res.data
|
||||
assert b'The days of Terminator' in res.data
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
|
||||
def test_rss_xpath_filtering(client, live_server):
    #live_server_setup(live_server)

    set_original_cdata_xml()

    test_url = url_for('test_endpoint', content_type="application/xml", _external=True)

    res = client.post(
        url_for("form_quick_watch_add"),
        data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
        follow_redirects=True
    )
    assert b"Watch added in Paused state, saving will unpause" in res.data

    uuid = extract_UUID_from_client(client)
    res = client.post(
        url_for("edit_page", uuid=uuid, unpause_on_save=1),
        data={
            "include_filters": "//item/title",
            "fetch_backend": "html_requests",
            "headers": "",
            "proxy": "no-proxy",
            "tags": "",
            "url": test_url,
        },
        follow_redirects=True
    )
    assert b"unpaused" in res.data

    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )
    assert b'CDATA' not in res.data
    assert b'<![' not in res.data
    # #1874 All but the first <title> was getting selected
    # Convert any HTML with just a top level <title> to <h1> to be sure title renders

    assert b'Hackers can access your computer' in res.data  # Should ONLY be selected by the xpath
    assert b'Some other title' in res.data  # Should ONLY be selected by the xpath
    assert b'The days of Terminator' not in res.data  # Should NOT be selected by the xpath
    assert b'Some other description' not in res.data  # Should NOT be selected by the xpath

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)

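# Quick sketch of the filter semantics tested above: '//item/title' matches every
# <title> that sits directly under an <item> (both items here) and nothing else,
# which is the behaviour issue #1874 was about.
from lxml import etree
_doc = etree.fromstring(b"<rss><channel><title>feed</title><item><title>A</title></item><item><title>B</title></item></channel></rss>")
assert [t.text for t in _doc.xpath('//item/title')] == ['A', 'B']  # feed-level <title> excluded
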
@@ -1,5 +1,5 @@
from flask import url_for
from . util import set_original_response, set_modified_response, live_server_setup
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks
import time


@@ -12,6 +12,7 @@ def test_bad_access(client, live_server):
    )

    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    # Attempt to add a body with a GET method
    res = client.post(
@@ -59,7 +60,7 @@ def test_bad_access(client, live_server):
        data={"url": 'file:///tasty/disk/drive', "tags": ''},
        follow_redirects=True
    )
    time.sleep(1)
    wait_for_all_checks(client)
    res = client.get(url_for("index"))

    assert b'file:// type access is denied for security reasons.' in res.data
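# A minimal sketch, assuming a simple scheme allow-list, of the kind of guard the
# assertion above exercises (the project's real check may differ):
from urllib.parse import urlparse

def _is_safe_url(url):
    return urlparse(url).scheme in ('http', 'https')

assert not _is_safe_url('file:///tasty/disk/drive')
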
@@ -2,13 +2,15 @@

import time
from flask import url_for
from . util import live_server_setup
from .util import live_server_setup, wait_for_all_checks

from ..html_tools import *


def test_setup(live_server):
    live_server_setup(live_server)


def set_original_response():
    test_return_data = """<html>
    <body>
@@ -26,6 +28,7 @@ def set_original_response():
        f.write(test_return_data)
    return None


def set_modified_response():
    test_return_data = """<html>
    <body>
@@ -44,11 +47,12 @@ def set_modified_response():

    return None


# Handle utf-8 charset replies https://github.com/dgtlmoon/changedetection.io/pull/613
def test_check_xpath_filter_utf8(client, live_server):
    filter='//item/*[self::description]'
    filter = '//item/*[self::description]'

    d='''<?xml version="1.0" encoding="UTF-8"?>
    d = '''<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
<channel>
<title>rpilocator.com</title>
@@ -86,14 +90,14 @@ def test_check_xpath_filter_utf8(client, live_server):
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    time.sleep(1)
    wait_for_all_checks(client)
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    time.sleep(3)
    wait_for_all_checks(client)
    res = client.get(url_for("index"))
    assert b'Unicode strings with encoding declaration are not supported.' not in res.data
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
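    # Background note for the assertion above: lxml rejects str input that still
    # carries an encoding declaration, while the same document passed as bytes
    # parses fine. A minimal illustration:
    from lxml import etree
    try:
        etree.fromstring('<?xml version="1.0" encoding="UTF-8"?><a/>')
    except ValueError:
        pass  # "Unicode strings with encoding declaration are not supported..."
    etree.fromstring(b'<?xml version="1.0" encoding="UTF-8"?><a/>')  # bytes input is fine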
@@ -102,9 +106,9 @@ def test_check_xpath_filter_utf8(client, live_server):

# Handle utf-8 charset replies https://github.com/dgtlmoon/changedetection.io/pull/613
def test_check_xpath_text_function_utf8(client, live_server):
    filter='//item/title/text()'
    filter = '//item/title/text()'

    d='''<?xml version="1.0" encoding="UTF-8"?>
    d = '''<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
<channel>
<title>rpilocator.com</title>
@@ -140,14 +144,14 @@ def test_check_xpath_text_function_utf8(client, live_server):
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    time.sleep(1)
    wait_for_all_checks(client)
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    time.sleep(3)
    wait_for_all_checks(client)
    res = client.get(url_for("index"))
    assert b'Unicode strings with encoding declaration are not supported.' not in res.data

@@ -163,16 +167,12 @@ def test_check_xpath_text_function_utf8(client, live_server):
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

def test_check_markup_xpath_filter_restriction(client, live_server):
    sleep_time_for_fetch_thread = 3

def test_check_markup_xpath_filter_restriction(client, live_server):
    xpath_filter = "//*[contains(@class, 'sametext')]"

    set_original_response()

    # Give the endpoint time to spin up
    time.sleep(1)

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
@@ -183,7 +183,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server):
    assert b"1 Imported" in res.data

    # Give the thread time to pick it up
    time.sleep(sleep_time_for_fetch_thread)
    wait_for_all_checks(client)

    # Goto the edit page, add our ignore text
    # Add our URL to the import page
@@ -195,7 +195,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server):
    assert b"Updated watch." in res.data

    # Give the thread time to pick it up
    time.sleep(sleep_time_for_fetch_thread)
    wait_for_all_checks(client)

    # view it/reset state back to viewed
    client.get(url_for("diff_history_page", uuid="first"), follow_redirects=True)
@@ -206,7 +206,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server):
    # Trigger a check
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    # Give the thread time to pick it up
    time.sleep(sleep_time_for_fetch_thread)
    wait_for_all_checks(client)

    res = client.get(url_for("index"))
    assert b'unviewed' not in res.data
@@ -215,10 +215,6 @@ def test_check_markup_xpath_filter_restriction(client, live_server):


def test_xpath_validation(client, live_server):

    # Give the endpoint time to spin up
    time.sleep(1)

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
@@ -227,7 +223,7 @@ def test_xpath_validation(client, live_server):
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    time.sleep(2)
    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
@@ -239,14 +235,172 @@ def test_xpath_validation(client, live_server):
    assert b'Deleted' in res.data


def test_xpath23_prefix_validation(client, live_server):
    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "xpath:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"is not a valid XPath expression" in res.data
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


def test_xpath1_validation(client, live_server):
    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "xpath1:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"is not a valid XPath expression" in res.data
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

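# A hypothetical sketch (names are illustrative, not the project's code) of how an
# include_filters entry's 'xpath:' / 'xpath1:' prefix could be separated from the
# expression before validation, which is what the two tests above exercise:
def _split_filter_rule(rule):
    for prefix in ('xpath1:', 'xpath:'):
        if rule.startswith(prefix):
            return prefix.rstrip(':'), rule[len(prefix):]
    return None, rule

assert _split_filter_rule('xpath1:/something horrible') == ('xpath1', '/something horrible')
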
# actually only really used by the distill.io importer, but could be handy too
def test_check_with_prefix_include_filters(client, live_server):
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

    # Give the endpoint time to spin up
    time.sleep(1)
    set_original_response()
    wait_for_all_checks(client)
    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tags": "", "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Updated watch." in res.data
    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"Some text thats the same" in res.data  # in selector
    assert b"Some text that will change" not in res.data  # not in selector

    client.get(url_for("form_delete", uuid="all"), follow_redirects=True)


def test_various_rules(client, live_server):
    # Just check these don't error
    # live_server_setup(live_server)
    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write("""<html>
        <body>
        Some initial text<br>
        <p>Which is across multiple lines</p>
        <br>
        So let's see what happens. <br>
        <div class="sametext">Some text thats the same</div>
        <div class="changetext">Some text that will change</div>
        <a href=''>some linky </a>
        <a href=''>another some linky </a>
        <!-- related to https://github.com/dgtlmoon/changedetection.io/pull/1774 -->
        <input type="email" id="email" />
        </body>
        </html>
        """)

    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    for r in ['//div', '//a', 'xpath://div', 'xpath://a']:
        res = client.post(
            url_for("edit_page", uuid="first"),
            data={"include_filters": r,
                  "url": test_url,
                  "tags": "",
                  "headers": "",
                  'fetch_backend': "html_requests"},
            follow_redirects=True
        )
        wait_for_all_checks(client)
        assert b"Updated watch." in res.data
        res = client.get(url_for("index"))
        assert b'fetch-error' not in res.data, f"Should not see errors after '{r}' filter"

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


def test_xpath_20(client, live_server):
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    set_original_response()

    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "//*[contains(@class, 'sametext')]|//*[contains(@class, 'changetext')]",
              "url": test_url,
              "tags": "",
              "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Updated watch." in res.data
    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"Some text thats the same" in res.data  # in selector
    assert b"Some text that will change" in res.data  # in selector

    client.get(url_for("form_delete", uuid="all"), follow_redirects=True)


def test_xpath_20_function_count(client, live_server):
    set_original_response()

    # Add our URL to the import page
@@ -257,23 +411,100 @@ def test_check_with_prefix_include_filters(client, live_server):
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    time.sleep(3)
    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
        data={"include_filters": "xpath:count(//div) * 123456789987654321",
              "url": test_url,
              "tags": "",
              "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Updated watch." in res.data
    time.sleep(3)
    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"Some text thats the same" in res.data  # in selector
    assert b"Some text that will change" not in res.data  # not in selector
    assert b"246913579975308642" in res.data  # in selector

    client.get(url_for("form_delete", uuid="all"), follow_redirects=True)

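# Sanity check on the expected value asserted above: the fixture contains two
# <div> elements (sametext and changetext), so the filter evaluates to
# count(//div) * 123456789987654321 == 2 * 123456789987654321:
assert 2 * 123456789987654321 == 246913579975308642
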
def test_xpath_20_function_count2(client, live_server):
    set_original_response()

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "/html/body/count(div) * 123456789987654321",
              "url": test_url,
              "tags": "",
              "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Updated watch." in res.data
    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"246913579975308642" in res.data  # in selector

    client.get(url_for("form_delete", uuid="all"), follow_redirects=True)


def test_xpath_20_function_string_join_matches(client, live_server):
    set_original_response()

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={
            "include_filters": "xpath:string-join(//*[contains(@class, 'sametext')]|//*[matches(@class, 'changetext')], 'specialconjunction')",
            "url": test_url,
            "tags": "",
            "headers": "",
            'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Updated watch." in res.data
    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"Some text thats the samespecialconjunctionSome text that will change" in res.data  # in selector

    client.get(url_for("form_delete", uuid="all"), follow_redirects=True)

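# The expected concatenation above is plain string-join semantics; in Python terms:
assert 'specialconjunction'.join(['Some text thats the same', 'Some text that will change']) == \
    'Some text thats the samespecialconjunctionSome text that will change'
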
203
changedetectionio/tests/test_xpath_selector_unit.py
Normal file
@@ -0,0 +1,203 @@
import sys
import os
import pytest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import html_tools

# test generation guide.
# 1. Do not include encoding in the xml declaration if the test object is a str type.
# 2. Always paraphrase test.

hotels = """
<hotel>
  <branch location="California">
    <staff>
      <given_name>Christopher</given_name>
      <surname>Anderson</surname>
      <age>25</age>
    </staff>
    <staff>
      <given_name>Christopher</given_name>
      <surname>Carter</surname>
      <age>30</age>
    </staff>
  </branch>
  <branch location="Las Vegas">
    <staff>
      <given_name>Lisa</given_name>
      <surname>Walker</surname>
      <age>60</age>
    </staff>
    <staff>
      <given_name>Jessica</given_name>
      <surname>Walker</surname>
      <age>32</age>
    </staff>
    <staff>
      <given_name>Jennifer</given_name>
      <surname>Roberts</surname>
      <age>50</age>
    </staff>
  </branch>
</hotel>"""

@pytest.mark.parametrize("html_content", [hotels])
@pytest.mark.parametrize("xpath, answer", [('(//staff/given_name, //staff/age)', '25'),
    ("xs:date('2023-10-10')", '2023-10-10'),
    ("if (/hotel/branch[@location = 'California']/staff[1]/age = 25) then 'is 25' else 'is not 25'", 'is 25'),
    ("if (//hotel/branch[@location = 'California']/staff[1]/age = 25) then 'is 25' else 'is not 25'", 'is 25'),
    ("if (count(/hotel/branch/staff) = 5) then true() else false()", 'true'),
    ("if (count(//hotel/branch/staff) = 5) then true() else false()", 'true'),
    ("for $i in /hotel/branch/staff return if ($i/age >= 40) then upper-case($i/surname) else lower-case($i/surname)", 'anderson'),
    ("given_name = 'Christopher' and age = 40", 'false'),
    ("//given_name = 'Christopher' and //age = 40", 'false'),
    #("(staff/given_name, staff/age)", 'Lisa'),
    ("(//staff/given_name, //staff/age)", 'Lisa'),
    #("hotel/branch[@location = 'California']/staff/age union hotel/branch[@location = 'Las Vegas']/staff/age", ''),
    ("(//hotel/branch[@location = 'California']/staff/age union //hotel/branch[@location = 'Las Vegas']/staff/age)", '60'),
    ("(200 to 210)", "205"),
    ("(//hotel/branch[@location = 'California']/staff/age union //hotel/branch[@location = 'Las Vegas']/staff/age)", "50"),
    ("(1, 9, 9, 5)", "5"),
    ("(3, (), (14, 15), 92, 653)", "653"),
    ("for $i in /hotel/branch/staff return $i/given_name", "Christopher"),
    ("for $i in //hotel/branch/staff return $i/given_name", "Christopher"),
    ("distinct-values(for $i in /hotel/branch/staff return $i/given_name)", "Jessica"),
    ("distinct-values(for $i in //hotel/branch/staff return $i/given_name)", "Jessica"),
    ("for $i in (7 to 15) return $i*10", "130"),
    ("some $i in /hotel/branch/staff satisfies $i/age < 20", "false"),
    ("some $i in //hotel/branch/staff satisfies $i/age < 20", "false"),
    ("every $i in /hotel/branch/staff satisfies $i/age > 20", "true"),
    ("every $i in //hotel/branch/staff satisfies $i/age > 20 ", "true"),
    ("let $x := branch[@location = 'California'], $y := branch[@location = 'Las Vegas'] return (avg($x/staff/age), avg($y/staff/age))", "27.5"),
    ("let $x := //branch[@location = 'California'], $y := //branch[@location = 'Las Vegas'] return (avg($x/staff/age), avg($y/staff/age))", "27.5"),
    ("let $nu := 1, $de := 1000 return 'probability = ' || $nu div $de * 100 || '%'", "0.1%"),
    ("let $nu := 2, $probability := function ($argument) { 'probability = ' || $nu div $argument * 100 || '%'}, $de := 5 return $probability($de)", "40%"),
    ("'XPATH2.0-3.1 dissemination' instance of xs:string ", "true"),
    ("'new stackoverflow question incoming' instance of xs:integer ", "false"),
    ("'50000' cast as xs:integer", "50000"),
    ("//branch[@location = 'California']/staff[1]/surname eq 'Anderson'", "true"),
    ("fn:false()", "false")])
def test_hotels(html_content, xpath, answer):
    html_content = html_tools.xpath_filter(xpath, html_content, append_pretty_line_formatting=True)
    assert type(html_content) == str
    assert answer in html_content
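
# Example invocation mirroring the first parametrized case above: an XPath 2.0
# sequence expression yields several values, and xpath_filter renders them into
# a single string, so both the given_name and the age appear in the output.
_out = html_tools.xpath_filter('(//staff/given_name, //staff/age)', hotels,
                               append_pretty_line_formatting=True)
assert 'Christopher' in _out and '25' in _out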


branches_to_visit = """<?xml version="1.0" ?>
<branches_to_visit>
  <manager name="Godot" room_no="501">
    <branch>Area 51</branch>
    <branch>A place with no name</branch>
    <branch>Stalsk12</branch>
  </manager>
  <manager name="Freya" room_no="305">
    <branch>Stalsk12</branch>
    <branch>Barcelona</branch>
    <branch>Paris</branch>
  </manager>
</branches_to_visit>"""

@pytest.mark.parametrize("html_content", [branches_to_visit])
@pytest.mark.parametrize("xpath, answer", [
    ("manager[@name = 'Godot']/branch union manager[@name = 'Freya']/branch", "Area 51"),
    ("//manager[@name = 'Godot']/branch union //manager[@name = 'Freya']/branch", "Stalsk12"),
    ("manager[@name = 'Godot']/branch | manager[@name = 'Freya']/branch", "Stalsk12"),
    ("//manager[@name = 'Godot']/branch | //manager[@name = 'Freya']/branch", "Stalsk12"),
    ("manager/branch intersect manager[@name = 'Godot']/branch", "A place with no name"),
    ("//manager/branch intersect //manager[@name = 'Godot']/branch", "A place with no name"),
    ("manager[@name = 'Godot']/branch intersect manager[@name = 'Freya']/branch", ""),
    ("manager/branch except manager[@name = 'Godot']/branch", "Barcelona"),
    ("manager[@name = 'Godot']/branch[1] eq 'Area 51'", "true"),
    ("//manager[@name = 'Godot']/branch[1] eq 'Area 51'", "true"),
    ("manager[@name = 'Godot']/branch[1] eq 'Seoul'", "false"),
    ("//manager[@name = 'Godot']/branch[1] eq 'Seoul'", "false"),
    ("manager[@name = 'Godot']/branch[2] eq manager[@name = 'Freya']/branch[2]", "false"),
    ("//manager[@name = 'Godot']/branch[2] eq //manager[@name = 'Freya']/branch[2]", "false"),
    ("manager[1]/@room_no lt manager[2]/@room_no", "false"),
    ("//manager[1]/@room_no lt //manager[2]/@room_no", "false"),
    ("manager[1]/@room_no gt manager[2]/@room_no", "true"),
    ("//manager[1]/@room_no gt //manager[2]/@room_no", "true"),
    ("manager[@name = 'Godot']/branch[1] = 'Area 51'", "true"),
    ("//manager[@name = 'Godot']/branch[1] = 'Area 51'", "true"),
    ("manager[@name = 'Godot']/branch[1] = 'Seoul'", "false"),
    ("//manager[@name = 'Godot']/branch[1] = 'Seoul'", "false"),
    ("manager[@name = 'Godot']/branch = 'Area 51'", "true"),
    ("//manager[@name = 'Godot']/branch = 'Area 51'", "true"),
    ("manager[@name = 'Godot']/branch = 'Barcelona'", "false"),
    ("//manager[@name = 'Godot']/branch = 'Barcelona'", "false"),
    ("manager[1]/@room_no > manager[2]/@room_no", "true"),
    ("//manager[1]/@room_no > //manager[2]/@room_no", "true"),
    ("manager[@name = 'Godot']/branch[ . = 'Stalsk12'] is manager[1]/branch[1]", "false"),
    ("//manager[@name = 'Godot']/branch[ . = 'Stalsk12'] is //manager[1]/branch[1]", "false"),
    ("manager[@name = 'Godot']/branch[ . = 'Stalsk12'] is manager[1]/branch[3]", "true"),
    ("//manager[@name = 'Godot']/branch[ . = 'Stalsk12'] is //manager[1]/branch[3]", "true"),
    ("manager[@name = 'Godot']/branch[ . = 'Stalsk12'] << manager[1]/branch[1]", "false"),
    ("//manager[@name = 'Godot']/branch[ . = 'Stalsk12'] << //manager[1]/branch[1]", "false"),
    ("manager[@name = 'Godot']/branch[ . = 'Stalsk12'] >> manager[1]/branch[1]", "true"),
    ("//manager[@name = 'Godot']/branch[ . = 'Stalsk12'] >> //manager[1]/branch[1]", "true"),
    ("manager[@name = 'Godot']/branch[ . = 'Stalsk12'] is manager[@name = 'Freya']/branch[ . = 'Stalsk12']", "false"),
    ("//manager[@name = 'Godot']/branch[ . = 'Stalsk12'] is //manager[@name = 'Freya']/branch[ . = 'Stalsk12']", "false"),
    ("manager[1]/@name || manager[2]/@name", "GodotFreya"),
    ("//manager[1]/@name || //manager[2]/@name", "GodotFreya"),
])
def test_branches_to_visit(html_content, xpath, answer):
    html_content = html_tools.xpath_filter(xpath, html_content, append_pretty_line_formatting=True)
    assert type(html_content) == str
    assert answer in html_content
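
# Notes on the operators exercised above: 'is' compares node identity (not value),
# while '<<' and '>>' test document order - e.g. Godot's 'Stalsk12' branch is the
# third branch of manager[1], so it follows ('>>') manager[1]/branch[1], and it is
# a different node from Freya's equally-named 'Stalsk12' branch.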

trips = """
<trips>
  <trip reservation_number="10">
    <depart>2023-10-06</depart>
    <arrive>2023-10-10</arrive>
    <traveler name="Christopher Anderson">
      <duration>4</duration>
      <price>2000.00</price>
    </traveler>
  </trip>
  <trip reservation_number="12">
    <depart>2023-10-06</depart>
    <arrive>2023-10-12</arrive>
    <traveler name="Frank Carter">
      <duration>6</duration>
      <price>3500.34</price>
    </traveler>
  </trip>
</trips>"""

@pytest.mark.parametrize("html_content", [trips])
@pytest.mark.parametrize("xpath, answer", [
    ("1 + 9 * 9 + 5 div 5", "83"),
    ("(1 + 9 * 9 + 5) div 6", "14.5"),
    ("23 idiv 3", "7"),
    ("23 div 3", "7.66666666"),
    ("for $i in ./trip return $i/traveler/duration * $i/traveler/price", "21002.04"),
    ("for $i in ./trip return $i/traveler/duration ", "4"),
    ("for $i in .//trip return $i/traveler/duration * $i/traveler/price", "21002.04"),
    ("sum(for $i in ./trip return $i/traveler/duration * $i/traveler/price)", "29002.04"),
    ("sum(for $i in .//trip return $i/traveler/duration * $i/traveler/price)", "29002.04"),
    #("trip[1]/depart - trip[1]/arrive", "fail_to_get_answer"),
    #("//trip[1]/depart - //trip[1]/arrive", "fail_to_get_answer"),
    #("trip[1]/depart + trip[1]/arrive", "fail_to_get_answer"),
    #("xs:date(trip[1]/depart) + xs:date(trip[1]/arrive)", "fail_to_get_answer"),
    ("(//trip[1]/arrive cast as xs:date) - (//trip[1]/depart cast as xs:date)", "P4D"),
    ("(//trip[1]/depart cast as xs:date) - (//trip[1]/arrive cast as xs:date)", "-P4D"),
    ("(//trip[1]/depart cast as xs:date) + xs:dayTimeDuration('P3D')", "2023-10-09"),
    ("(//trip[1]/depart cast as xs:date) - xs:dayTimeDuration('P3D')", "2023-10-03"),
    ("(456, 623) instance of xs:integer", "false"),
    ("(456, 623) instance of xs:integer*", "true"),
    ("/trips/trip instance of element()", "false"),
    ("/trips/trip instance of element()*", "true"),
    ("/trips/trip[1]/arrive instance of xs:date", "false"),
    ("date(/trips/trip[1]/arrive) instance of xs:date", "true"),
    ("'8' cast as xs:integer", "8"),
    ("'11.1E3' cast as xs:double", "11100"),
    ("6.5 cast as xs:integer", "6"),
    #("/trips/trip[1]/arrive cast as xs:dateTime", "fail_to_get_answer"),
    ("/trips/trip[1]/arrive cast as xs:date", "2023-10-10"),
    ("('2023-10-12') cast as xs:date", "2023-10-12"),
    ("for $i in //trip return concat($i/depart, ' ', $i/arrive)", "2023-10-06 2023-10-10"),
])
def test_trips(html_content, xpath, answer):
    html_content = html_tools.xpath_filter(xpath, html_content, append_pretty_line_formatting=True)
    assert type(html_content) == str
    assert answer in html_content
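
# Notes on the expected values above: 'P4D'/'-P4D' are ISO 8601 duration strings
# (2023-10-10 minus 2023-10-06 is four days), and 'idiv' is XPath integer
# division, so 23 idiv 3 == 7 while 23 div 3 is roughly 7.66666666.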
54
changedetectionio/tests/unit/test_watch_model.py
Normal file
@@ -0,0 +1,54 @@
#!/usr/bin/python3

# run from dir above changedetectionio/ dir
# python3 -m unittest changedetectionio.tests.unit.test_notification_diff

import unittest
import os

from changedetectionio.model import Watch

# mostly
class TestDiffBuilder(unittest.TestCase):

    def test_watch_get_suggested_from_diff_timestamp(self):
        import uuid as uuid_builder
        watch = Watch.model(datastore_path='/tmp', default={})
        watch.ensure_data_dir_exists()

        watch['last_viewed'] = 110

        watch.save_history_text(contents=b"hello world", timestamp=100, snapshot_id=str(uuid_builder.uuid4()))
        watch.save_history_text(contents=b"hello world", timestamp=105, snapshot_id=str(uuid_builder.uuid4()))
        watch.save_history_text(contents=b"hello world", timestamp=109, snapshot_id=str(uuid_builder.uuid4()))
        watch.save_history_text(contents=b"hello world", timestamp=112, snapshot_id=str(uuid_builder.uuid4()))
        watch.save_history_text(contents=b"hello world", timestamp=115, snapshot_id=str(uuid_builder.uuid4()))
        watch.save_history_text(contents=b"hello world", timestamp=117, snapshot_id=str(uuid_builder.uuid4()))

        p = watch.get_next_snapshot_key_to_last_viewed
        assert p == "112", "Correct last-viewed timestamp was detected"
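        # Note: history keys are stringified epoch timestamps, which is why the
        # property returns strings like "112" rather than integers.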

        # When there is only one step of difference from the end of the list, it should return second-last change
        watch['last_viewed'] = 116
        p = watch.get_next_snapshot_key_to_last_viewed
        assert p == "115", "Correct 'second last' last-viewed timestamp was detected when using the last timestamp"

        watch['last_viewed'] = 99
        p = watch.get_next_snapshot_key_to_last_viewed
        assert p == "100"

        watch['last_viewed'] = 200
        p = watch.get_next_snapshot_key_to_last_viewed
        assert p == "115", "When the 'last viewed' timestamp is greater than the newest snapshot, return second last"

        watch['last_viewed'] = 109
        p = watch.get_next_snapshot_key_to_last_viewed
        assert p == "109", "Correct when it's the same time"

        # new empty one
        watch = Watch.model(datastore_path='/tmp', default={})
        p = watch.get_next_snapshot_key_to_last_viewed
        assert p == None, "None when no history available"

if __name__ == '__main__':
    unittest.main()
@@ -205,6 +205,9 @@ def live_server_setup(live_server):
    with open("test-datastore/notification-url.txt", "w") as f:
        f.write(request.url)

    with open("test-datastore/notification-headers.txt", "w") as f:
        f.write(str(request.headers))

    if request.content_type:
        with open("test-datastore/notification-content-type.txt", "w") as f:
            f.write(request.content_type)

@@ -1,18 +1,19 @@
#!/usr/bin/python3

import time
import os
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client

def test_setup(client, live_server):
    live_server_setup(live_server)

# Add a site in paused mode, add an invalid filter, we should still have visual selector data ready
def test_visual_selector_content_ready(client, live_server):
    import os
    import json

    assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"
    time.sleep(1)
    live_server_setup(live_server)


    # Add our URL to the import page, because the docker container (playwright/selenium) won't be able to connect to our usual test url
    test_url = "https://changedetection.io/ci-test/test-runjs.html"
@@ -53,6 +54,13 @@ def test_visual_selector_content_ready(client, live_server):
    with open(os.path.join('test-datastore', uuid, 'elements.json'), 'r') as f:
        json.load(f)

    # Attempt to fetch it via the web hook that the browser would use
    res = client.get(url_for('static_content', group='visual_selector_data', filename=uuid))
    json.loads(res.data)
    assert res.mimetype == 'application/json'
    assert res.status_code == 200


    # Some options should be enabled
    # @todo - in the future, the visibility should be toggled by JS from the request type setting
    res = client.get(
@@ -60,4 +68,75 @@ def test_visual_selector_content_ready(client, live_server):
        follow_redirects=True
    )
    assert b'notification_screenshot' in res.data
    client.get(
        url_for("form_delete", uuid="all"),
        follow_redirects=True
    )

def test_basic_browserstep(client, live_server):

    assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"
    #live_server_setup(live_server)

    # Add our URL to the import page, because the docker container (playwright/selenium) won't be able to connect to our usual test url
    test_url = "https://changedetection.io/ci-test/test-runjs.html"

    res = client.post(
        url_for("form_quick_watch_add"),
        data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
        follow_redirects=True
    )
    assert b"Watch added in Paused state, saving will unpause" in res.data

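    # The flattened 'browser_steps-N-<field>' keys below mirror how an indexed
    # form field list (WTForms-style FieldList) is posted: one operation,
    # selector and optional value per step.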
    res = client.post(
        url_for("edit_page", uuid="first", unpause_on_save=1),
        data={
            "url": test_url,
            "tags": "",
            "headers": "",
            'fetch_backend': "html_webdriver",
            'browser_steps-0-operation': 'Goto site',
            'browser_steps-1-operation': 'Click element',
            'browser_steps-1-selector': 'button[name=test-button]',
            'browser_steps-1-optional_value': ''
        },
        follow_redirects=True
    )
    assert b"unpaused" in res.data
    wait_for_all_checks(client)

    uuid = extract_UUID_from_client(client)

    # Check HTML conversion was detected and worked
    res = client.get(
        url_for("preview_page", uuid=uuid),
        follow_redirects=True
    )
    assert b"This text should be removed" not in res.data
    assert b"I smell JavaScript because the button was pressed" in res.data

    # now test for 404 errors
    res = client.post(
        url_for("edit_page", uuid=uuid, unpause_on_save=1),
        data={
            "url": "https://changedetection.io/404",
            "tags": "",
            "headers": "",
            'fetch_backend': "html_webdriver",
            'browser_steps-0-operation': 'Goto site',
            'browser_steps-1-operation': 'Click element',
            'browser_steps-1-selector': 'button[name=test-button]',
            'browser_steps-1-optional_value': ''
        },
        follow_redirects=True
    )
    assert b"unpaused" in res.data
    wait_for_all_checks(client)

    res = client.get(url_for("index"))
    assert b'Error - 404' in res.data

    client.get(
        url_for("form_delete", uuid="all"),
        follow_redirects=True
    )