Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-10-30 14:17:40 +00:00)

Compare commits: bugfix-del...1800-selen (85 commits)
| SHA1 |
|---|
| 27db6bcc13 |
| fd01a13314 |
| 3b4b10f22f |
| 7debccca73 |
| 5801c0f2c8 |
| 8547894b65 |
| c06bc7d018 |
| 59578803bf |
| a5db3a0b99 |
| 49a5337ac4 |
| ceac8c21e4 |
| a7132b1cfc |
| 2b948c15c1 |
| 34f2d30968 |
| 700729a332 |
| b6060ac90c |
| 5cccccb0b6 |
| c52eb512e8 |
| 7282df9c08 |
| e30b17b8bc |
| 1e88136325 |
| 57de4ffe4f |
| 51e2e8a226 |
| 8887459462 |
| 460c724e51 |
| dcf4bf37ed |
| e3cf22fc27 |
| d497db639e |
| 7355ac8d21 |
| 2f2d0ea0f2 |
| a958e1fe20 |
| 5dc3b00ec6 |
| 8ac4757cd9 |
| 2180bb256d |
| 212f15ad5f |
| 22b2068208 |
| 4916043055 |
| 7bf13bad30 |
| 0aa2276afb |
| 3b875e5a6a |
| 8ec50294d2 |
| e3c9255d9e |
| 3b03bdcb82 |
| e25792bcec |
| bf4168a2aa |
| 9d37eaa57b |
| 40d01acde9 |
| d34832de73 |
| ed4bafae63 |
| 3a5bceadfa |
| 6abdf2d332 |
| dee23709a9 |
| 52df3b10e7 |
| 087d21c61e |
| 171faf465c |
| a3d8bd0b1a |
| 6ef8a1c18f |
| 126f0fbf87 |
| cfa712c88c |
| 6a6ba40b6a |
| e7f726c057 |
| df0cc7b585 |
| 76cd98b521 |
| f84ba0fb31 |
| c35cbd33d6 |
| 661f7fe32c |
| 7cb7eebbc5 |
| aaceb4ebad |
| 56cf6e5ea5 |
| 1987e109e8 |
| 20d65cdd26 |
| 37ff5f6d37 |
| 2f777ea3bb |
| e709201955 |
| 572f71299f |
| 5f150c4f03 |
| 8cbf8e8f57 |
| 0e65dda5b6 |
| 72a415144b |
| 52f2c00308 |
| 72311fb845 |
| f1b10a22f8 |
| a4c620c308 |
| 9434eac72d |
| edb5e20de6 |
@@ -1,2 +1,18 @@
.git
.github
changedetectionio/processors/__pycache__
changedetectionio/api/__pycache__
changedetectionio/model/__pycache__
changedetectionio/blueprint/price_data_follower/__pycache__
changedetectionio/blueprint/tags/__pycache__
changedetectionio/blueprint/__pycache__
changedetectionio/blueprint/browser_steps/__pycache__
changedetectionio/fetchers/__pycache__
changedetectionio/tests/visualselector/__pycache__
changedetectionio/tests/restock/__pycache__
changedetectionio/tests/__pycache__
changedetectionio/tests/fetchers/__pycache__
changedetectionio/tests/unit/__pycache__
changedetectionio/tests/proxy_list/__pycache__
changedetectionio/__pycache__
.github/test/Dockerfile-alpine (vendored, 4 changes)
@@ -2,7 +2,7 @@
# Test that we can still build on Alpine (musl modified libc https://musl.libc.org/)
# Some packages wont install via pypi because they dont have a wheel available under this architecture.

FROM ghcr.io/linuxserver/baseimage-alpine:3.16
FROM ghcr.io/linuxserver/baseimage-alpine:3.18
ENV PYTHONUNBUFFERED=1

COPY requirements.txt /requirements.txt

@@ -26,6 +26,6 @@ RUN \
py3-pip && \
echo "**** pip3 install test of changedetection.io ****" && \
pip3 install -U pip wheel setuptools && \
pip3 install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.16/ -r /requirements.txt && \
pip3 install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.18/ -r /requirements.txt && \
apk del --purge \
build-dependencies
.github/workflows/codeql-analysis.yml (vendored, 8 changes)
@@ -30,11 +30,11 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v2
uses: actions/checkout@v4

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.

@@ -45,7 +45,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1
uses: github/codeql-action/autobuild@v2

# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl

@@ -59,4 +59,4 @@ jobs:
# make release

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
uses: github/codeql-action/analyze@v2
.github/workflows/containers.yml (vendored, 26 changes)
@@ -39,11 +39,11 @@ jobs:
# Or if we are in a tagged release scenario.
if: ${{ github.event.workflow_run.conclusion == 'success' }} || ${{ github.event.release.tag_name }} != ''
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: 3.11

- name: Install dependencies
run: |

@@ -58,27 +58,27 @@ jobs:
echo ${{ github.ref }} > changedetectionio/tag.txt

- name: Set up QEMU
uses: docker/setup-qemu-action@v1
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:latest
platforms: all

- name: Login to GitHub Container Registry
uses: docker/login-action@v1
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Login to Docker Hub Container Registry
uses: docker/login-action@v1
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@v3
with:
install: true
version: latest

@@ -88,14 +88,14 @@ jobs:
- name: Build and push :dev
id: docker_build
if: ${{ github.ref }} == "refs/heads/master"
uses: docker/build-push-action@v2
uses: docker/build-push-action@v5
with:
context: ./
file: ./Dockerfile
push: true
tags: |
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:dev,ghcr.io/${{ github.repository }}:dev
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
# Looks like this was disabled

@@ -105,7 +105,7 @@ jobs:
- name: Build and push :tag
id: docker_build_tag_release
if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.')
uses: docker/build-push-action@v2
uses: docker/build-push-action@v5
with:
context: ./
file: ./Dockerfile

@@ -115,7 +115,7 @@ jobs:
ghcr.io/dgtlmoon/changedetection.io:${{ github.event.release.tag_name }}
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest
ghcr.io/dgtlmoon/changedetection.io:latest
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
# Looks like this was disabled

@@ -125,7 +125,7 @@ jobs:
run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}

- name: Cache Docker layers
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
.github/workflows/test-container-build.yml (vendored, 18 changes)
@@ -24,22 +24,22 @@ jobs:
test-container-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: 3.11

# Just test that the build works, some libraries won't compile on ARM/rPi etc
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:latest
platforms: all

- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@v3
with:
install: true
version: latest

@@ -49,7 +49,7 @@ jobs:
# Check we can still build under alpine/musl
- name: Test that the docker containers can build (musl via alpine check)
id: docker_build_musl
uses: docker/build-push-action@v2
uses: docker/build-push-action@v5
with:
context: ./
file: ./.github/test/Dockerfile-alpine

@@ -57,12 +57,12 @@

- name: Test that the docker containers can build
id: docker_build
uses: docker/build-push-action@v2
uses: docker/build-push-action@v5
# https://github.com/docker/build-push-action#customizing
with:
context: ./
file: ./Dockerfile
platforms: linux/arm/v7,linux/arm/v6,linux/amd64,linux/arm64,
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
.github/workflows/test-only.yml (vendored, 29 changes)
@@ -7,13 +7,13 @@ jobs:
test-application:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4

# Mainly just for link/flake8
- name: Set up Python 3.10
uses: actions/setup-python@v2
- name: Set up Python 3.11
uses: actions/setup-python@v4
with:
python-version: '3.10'
python-version: '3.11'

- name: Lint with flake8
run: |

@@ -29,17 +29,24 @@ jobs:
docker network create changedet-network

# Selenium+browserless
docker run --network changedet-network -d --hostname selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome-debug:3.141.59
# 117.0 didnt work.. try dev
docker run --network changedet-network -d --hostname selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome:117.0
docker run --network changedet-network -d --hostname browserless -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.53-chrome-stable

- name: Build changedetection.io container for testing
run: |
# Build a changedetection.io container and start testing inside
docker build . -t test-changedetectionio
# Debug info
docker run test-changedetectionio bash -c 'pip list'

- name: Spin up ancillary SMTP+Echo message test server
run: |
# Debug SMTP server/echo message back server
docker run --network changedet-network -d -p 11025:11025 -p 11080:11080 --hostname mailserver test-changedetectionio bash -c 'python changedetectionio/tests/smtp/smtp-test-server.py'

- name: Test built container with pytest
run: |

# Unit tests
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_notification_diff'

@@ -58,11 +65,16 @@ jobs:
# Settings headers playwright tests - Call back in from Browserless, check headers
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "WEBDRIVER_URL=http://selenium:4444/wd/hub" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'

# restock detection via playwright - added name=changedet here so that playwright/browserless can connect to it
docker run --rm --name "changedet" -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-port=5004 --live-server-host=0.0.0.0 tests/restock/test_restock.py'

- name: Test SMTP notification mime types
run: |
# SMTP content types - needs the 'Debug SMTP server/echo message back server' container from above
docker run --rm --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/smtp/test_notification_smtp.py'

- name: Test with puppeteer fetcher and disk cache
run: |
docker run --rm -e "PUPPETEER_DISK_CACHE=/tmp/data/" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py'

@@ -72,6 +84,7 @@ jobs:
run: |
cd changedetectionio
./run_proxy_tests.sh
# And again with PLAYWRIGHT_DRIVER_URL=..
cd ..

- name: Test changedetection.io container starts+runs basically without error

@@ -87,4 +100,4 @@ jobs:

#export WEBDRIVER_URL=http://localhost:4444/wd/hub
#pytest tests/fetchers/test_content.py
#pytest tests/test_errorhandling.py
#pytest tests/test_errorhandling.py
.github/workflows/test-pip-build.yml (vendored, 8 changes)
@@ -11,12 +11,12 @@ jobs:
test-pip-build-basics:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4

- name: Set up Python 3.9
uses: actions/setup-python@v2
- name: Set up Python 3.11
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: 3.11

- name: Test that the basic pip built package runs without error
@@ -1,5 +1,5 @@
# pip dependencies install stage
FROM python:3.10-slim as builder
FROM python:3.11-slim-bullseye as builder

# See `cryptography` pin comment in requirements.txt
ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1

@@ -29,7 +29,7 @@ RUN pip install --target=/dependencies playwright~=1.27.1 \
|| echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."

# Final image stage
FROM python:3.10-slim
FROM python:3.11-slim-bullseye

RUN apt-get update && apt-get install -y --no-install-recommends \
libssl1.1 \
@@ -13,3 +13,6 @@ include changedetection.py
global-exclude *.pyc
global-exclude node_modules
global-exclude venv

global-exclude test-datastore
global-exclude changedetection.io*dist-info
@@ -2,19 +2,44 @@

Live your data-life pro-actively, track website content changes and receive notifications via Discord, Email, Slack, Telegram and 70+ more

[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://changedetection.io)
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring, list of websites with changes" title="Self-hosted web page change monitoring, list of websites with changes" />](https://changedetection.io)

[**Don't have time? Let us host it for you! try our extremely affordable subscription use our proxies and support!**](https://changedetection.io)

#### Example use cases
### Target specific parts of the webpage using the Visual Selector tool.

Available when connected to a <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Playwright-content-fetcher">playwright content fetcher</a> (included as part of our subscription service)

[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/visualselector-anim.gif" style="max-width:100%;" alt="Select parts and elements of a web page to monitor for changes" title="Select parts and elements of a web page to monitor for changes" />](https://changedetection.io?src=pip)

### Easily see what changed, examine by word, line, or individual character.

[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />](https://changedetection.io?src=pip)

### Perform interactive browser steps

Fill in text boxes, click buttons and more, setup your changedetection scenario.

Using the **Browser Steps** configuration, add basic steps before performing change detection, such as logging into websites, adding a product to a cart, accept cookie logins, entering dates and refining searches.

[<img src="docs/browsersteps-anim.gif" style="max-width:100%;" alt="Website change detection with interactive browser steps, detect changes behind login and password, search queries and more" title="Website change detection with interactive browser steps, detect changes behind login and password, search queries and more" />](https://changedetection.io?src=pip)

After **Browser Steps** have been run, then visit the **Visual Selector** tab to refine the content you're interested in.
Requires Playwright to be enabled.

### Example use cases

- Products and services have a change in pricing
- _Out of stock notification_ and _Back In stock notification_
- Monitor and track PDF file changes, know when a PDF file has text changes.
- Governmental department updates (changes are often only on their websites)
- New software releases, security advisories when you're not on their mailing list.
- Festivals with changes
- Discogs restock alerts and monitoring
- Realestate listing changes
- Know when your favourite whiskey is on sale, or other special deals are announced before anyone else
- COVID related news from government websites

@@ -27,18 +52,34 @@ Live your data-life pro-actively, track website content changes and receive noti
- Create RSS feeds based on changes in web content
- Monitor HTML source code for unexpected changes, strengthen your PCI compliance
- You have a very sensitive list of URLs to watch and you do _not_ want to use the paid alternatives. (Remember, _you_ are the product)
- Get notified when certain keywords appear in Twitter search results
- Proactively search for jobs, get notified when companies update their careers page, search job portals for keywords.
- Get alerts when new job positions are open on Bamboo HR and other job platforms
- Website defacement monitoring
- Pokémon Card Restock Tracker / Pokémon TCG Tracker
- RegTech - stay ahead of regulatory changes, regulatory compliance

_Need an actual Chrome runner with Javascript support? We support fetching via WebDriver and Playwright!</a>_

#### Key Features

- Lots of trigger filters, such as "Trigger on text", "Remove text by selector", "Ignore text", "Extract text", also using regular-expressions!
- Target elements with xPath and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
- Target elements with xPath(1.0) and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
- Switch between fast non-JS and Chrome JS based "fetchers"
- Track changes in PDF files (Monitor text changed in the PDF, Also monitor PDF filesize and checksums)
- Easily specify how often a site should be checked
- Execute JS before extracting text (Good for logging in, see examples in the UI!)
- Override Request Headers, Specify `POST` or `GET` and other methods
- Use the "Visual Selector" to help target specific elements
- Configurable [proxy per watch](https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration)
- Send a screenshot with the notification when a change is detected in the web page

We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $100 using our signup link.

[Oxylabs](https://oxylabs.go2cloud.org/SH2d) is also an excellent proxy provider and well worth using, they offer Residental, ISP, Rotating and many other proxy types to suit your project.

Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/

```bash

@@ -54,5 +95,5 @@ $ changedetection.io -d /path/to/empty/data/dir -p 5000

Then visit http://127.0.0.1:5000 , You should now be able to access the UI.

See https://github.com/dgtlmoon/changedetection.io for more information.
See https://changedetection.io for more information.
README.md (17 changes)
@@ -5,7 +5,7 @@
_Live your data-life pro-actively._

[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://changedetection.io?src=github)
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web site page change monitoring" title="Self-hosted web site page change monitoring" />](https://changedetection.io?src=github)

[![Release Version][release-shield]][release-link] [![Docker Pulls][docker-pulls]][docker-link] [![License][license-shield]](LICENSE.md)

@@ -22,7 +22,7 @@ _Live your data-life pro-actively._

Available when connected to a <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Playwright-content-fetcher">playwright content fetcher</a> (included as part of our subscription service)

[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/visualselector-anim.gif" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />](https://changedetection.io?src=github)
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/visualselector-anim.gif" style="max-width:100%;" alt="Select parts and elements of a web page to monitor for changes" title="Select parts and elements of a web page to monitor for changes" />](https://changedetection.io?src=github)

### Easily see what changed, examine by word, line, or individual character.

@@ -35,7 +35,7 @@ Fill in text boxes, click buttons and more, setup your changedetection scenario.

Using the **Browser Steps** configuration, add basic steps before performing change detection, such as logging into websites, adding a product to a cart, accept cookie logins, entering dates and refining searches.

[<img src="docs/browsersteps-anim.gif" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Website change detection with interactive browser steps, login, cookies etc" />](https://changedetection.io?src=github)
[<img src="docs/browsersteps-anim.gif" style="max-width:100%;" alt="Website change detection with interactive browser steps, detect changes behind login and password, search queries and more" title="Website change detection with interactive browser steps, detect changes behind login and password, search queries and more" />](https://changedetection.io?src=github)

After **Browser Steps** have been run, then visit the **Visual Selector** tab to refine the content you're interested in.
Requires Playwright to be enabled.

@@ -67,13 +67,14 @@ Requires Playwright to be enabled.
- Get alerts when new job positions are open on Bamboo HR and other job platforms
- Website defacement monitoring
- Pokémon Card Restock Tracker / Pokémon TCG Tracker
- RegTech - stay ahead of regulatory changes, regulatory compliance

_Need an actual Chrome runner with Javascript support? We support fetching via WebDriver and Playwright!</a>_

#### Key Features

- Lots of trigger filters, such as "Trigger on text", "Remove text by selector", "Ignore text", "Extract text", also using regular-expressions!
- Target elements with xPath and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
- Target elements with xPath(1.0) and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
- Switch between fast non-JS and Chrome JS based "fetchers"
- Track changes in PDF files (Monitor text changed in the PDF, Also monitor PDF filesize and checksums)
- Easily specify how often a site should be checked

@@ -85,6 +86,8 @@ _Need an actual Chrome runner with Javascript support? We support fetching via W

We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $100 using our signup link.

[Oxylabs](https://oxylabs.go2cloud.org/SH2d) is also an excellent proxy provider and well worth using, they offer Residental, ISP, Rotating and many other proxy types to suit your project.

Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/

## Installation

@@ -144,7 +147,7 @@ See the wiki for more information https://github.com/dgtlmoon/changedetection.io

## Filters

XPath, JSONPath, jq, and CSS support comes baked in! You can be as specific as you need, use XPath exported from various XPath element query creation tools.
XPath(1.0), JSONPath, jq, and CSS support comes baked in! You can be as specific as you need, use XPath exported from various XPath element query creation tools.
(We support LXML `re:test`, `re:match` and `re:replace`.)

## Notifications

@@ -183,7 +186,7 @@ This will re-parse the JSON and apply formatting to the text, making it super ea

### JSONPath or jq?

For more complex parsing, filtering, and modifying of JSON data, jq is recommended due to the built-in operators and functions. Refer to the [documentation](https://stedolan.github.io/jq/manual/) for more specifc information on jq.
For more complex parsing, filtering, and modifying of JSON data, jq is recommended due to the built-in operators and functions. Refer to the [documentation](https://stedolan.github.io/jq/manual/) for more specific information on jq.

One big advantage of `jq` is that you can use logic in your JSON filter, such as filters to only show items that have a value greater than/less than etc.
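That jq logic can be tried outside the application as well. A minimal sketch, assuming the optional `jq` PyPI package (the binding changedetection.io relies on when jq support is enabled); the compiled expression is the same text you would use as the watch's jq filter:

```python
# Sketch only: filter a JSON document with jq-style logic (keep items under a price threshold).
import jq  # optional dependency; without it, jq filters are disabled in the application

data = {"items": [{"title": "widget", "price": 19.99},
                  {"title": "gadget", "price": 120.00}]}

# "Only items with a value less than 50" - a comparison plain JSONPath cannot express.
cheap_titles = jq.compile('.items[] | select(.price < 50) | .title').input(data).all()
print(cheap_titles)  # ['widget']
```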
@@ -223,7 +226,7 @@ The application also supports notifying you that it can follow this information

## Proxy Configuration

See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration , we also support using [BrightData proxy services where possible]( https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support)
See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration , we also support using [Bright Data proxy services where possible](https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support) and [Oxylabs](https://oxylabs.go2cloud.org/SH2d) proxy services.

## Raspberry Pi support?
@@ -38,7 +38,9 @@ from flask_paginate import Pagination, get_page_parameter
from changedetectionio import html_tools
from changedetectionio.api import api_v1

__version__ = '0.42.2'
__version__ = '0.45.3'

from changedetectionio.store import BASE_URL_NOT_SET_TEXT

datastore = None

@@ -317,25 +319,21 @@ def changedetection_app(config=None, datastore_o=None):
return "Access denied, bad token", 403

from . import diff
limit_tag = request.args.get('tag')
limit_tag = request.args.get('tag', '').lower().strip()
# Be sure limit_tag is a uuid
for uuid, tag in datastore.data['settings']['application'].get('tags', {}).items():
if limit_tag == tag.get('title', '').lower().strip():
limit_tag = uuid

# Sort by last_changed and add the uuid which is usually the key..
sorted_watches = []

# @todo needs a .itemsWithTag() or something - then we can use that in Jinaj2 and throw this away
for uuid, watch in datastore.data['watching'].items():

if limit_tag != None:
# Support for comma separated list of tags.
for tag_in_watch in watch['tag'].split(','):
tag_in_watch = tag_in_watch.strip()
if tag_in_watch == limit_tag:
watch['uuid'] = uuid
sorted_watches.append(watch)

else:
watch['uuid'] = uuid
sorted_watches.append(watch)
if limit_tag and not limit_tag in watch['tags']:
continue
watch['uuid'] = uuid
sorted_watches.append(watch)

sorted_watches.sort(key=lambda x: x.last_changed, reverse=False)

@@ -359,11 +357,11 @@ def changedetection_app(config=None, datastore_o=None):

# Include a link to the diff page, they will have to login here to see if password protection is enabled.
# Description is the page you watch, link takes you to the diff JS UI page
base_url = datastore.data['settings']['application']['base_url']
if base_url == '':
base_url = "<base-url-env-var-not-set>"
# Dict val base_url will get overriden with the env var if it is set.
ext_base_url = datastore.data['settings']['application'].get('active_base_url')

diff_link = {'href': "{}{}".format(base_url, url_for('diff_history_page', uuid=watch['uuid']))}
# Because we are called via whatever web server, flask should figure out the right path (
diff_link = {'href': url_for('diff_history_page', uuid=watch['uuid'], _external=True)}

fe.link(link=diff_link)

@@ -392,9 +390,17 @@
@app.route("/", methods=['GET'])
@login_optionally_required
def index():
global datastore
from changedetectionio import forms

limit_tag = request.args.get('tag')
limit_tag = request.args.get('tag', '').lower().strip()

# Be sure limit_tag is a uuid
for uuid, tag in datastore.data['settings']['application'].get('tags', {}).items():
if limit_tag == tag.get('title', '').lower().strip():
limit_tag = uuid

# Redirect for the old rss path which used the /?rss=true
if request.args.get('rss'):
return redirect(url_for('rss', tag=limit_tag))

@@ -414,30 +420,15 @@
sorted_watches = []
search_q = request.args.get('q').strip().lower() if request.args.get('q') else False
for uuid, watch in datastore.data['watching'].items():

if limit_tag:
# Support for comma separated list of tags.
if not watch.get('tag'):
if limit_tag and not limit_tag in watch['tags']:
continue
for tag_in_watch in watch.get('tag', '').split(','):
tag_in_watch = tag_in_watch.strip()
if tag_in_watch == limit_tag:
watch['uuid'] = uuid
if search_q:
if (watch.get('title') and search_q in watch.get('title').lower()) or search_q in watch.get('url', '').lower():
sorted_watches.append(watch)
else:
sorted_watches.append(watch)

else:
#watch['uuid'] = uuid
if search_q:
if (watch.get('title') and search_q in watch.get('title').lower()) or search_q in watch.get('url', '').lower():
sorted_watches.append(watch)
else:
if search_q:
if (watch.get('title') and search_q in watch.get('title').lower()) or search_q in watch.get('url', '').lower():
sorted_watches.append(watch)
else:
sorted_watches.append(watch)

existing_tags = datastore.get_all_tags()
form = forms.quickWatchForm(request.form)
page = request.args.get(get_page_parameter(), type=int, default=1)
total_count = len(sorted_watches)

@@ -452,6 +443,7 @@
# Don't link to hosting when we're on the hosting environment
active_tag=limit_tag,
app_rss_token=datastore.data['settings']['application']['rss_access_token'],
datastore=datastore,
form=form,
guid=datastore.data['app_guid'],
has_proxies=datastore.proxy_list,

@@ -463,7 +455,7 @@
sort_attribute=request.args.get('sort') if request.args.get('sort') else request.cookies.get('sort'),
sort_order=request.args.get('order') if request.args.get('order') else request.cookies.get('order'),
system_default_fetcher=datastore.data['settings']['application'].get('fetch_backend'),
tags=existing_tags,
tags=datastore.data['settings']['application'].get('tags'),
watches=sorted_watches
)

@@ -606,9 +598,13 @@

# proxy_override set to the json/text list of the items
form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
data=default,
data=default
)

# For the form widget tag uuid lookup
form.tags.datastore = datastore # in _value

form.fetch_backend.choices.append(("system", 'System settings default'))

# form.browser_steps[0] can be assumed that we 'goto url' first

@@ -659,6 +655,16 @@
extra_update_obj['filter_text_replaced'] = True
extra_update_obj['filter_text_removed'] = True

# Because wtforms doesn't support accessing other data in process_ , but we convert the CSV list of tags back to a list of UUIDs
tag_uuids = []
if form.data.get('tags'):
# Sometimes in testing this can be list, dont know why
if type(form.data.get('tags')) == list:
extra_update_obj['tags'] = form.data.get('tags')
else:
for t in form.data.get('tags').split(','):
tag_uuids.append(datastore.add_tag(name=t))
extra_update_obj['tags'] = tag_uuids

datastore.data['watching'][uuid].update(form.data)
datastore.data['watching'][uuid].update(extra_update_obj)

@@ -708,12 +714,11 @@
output = render_template("edit.html",
available_processors=processors.available_processors(),
browser_steps_config=browser_step_ui_config,
current_base_url=datastore.data['settings']['application']['base_url'],
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
form=form,
has_default_notification_urls=True if len(datastore.data['settings']['application']['notification_urls']) else False,
has_empty_checktime=using_default_check_time,
has_extra_headers_file=watch.has_extra_headers_file or datastore.has_extra_headers_file,
has_extra_headers_file=len(datastore.get_all_headers_in_textfile_for_watch(uuid=uuid)) > 0,
is_html_webdriver=is_html_webdriver,
jq_support=jq_support,
playwright_enabled=os.getenv('PLAYWRIGHT_DRIVER_URL', False),

@@ -798,7 +803,6 @@

output = render_template("settings.html",
form=form,
current_base_url = datastore.data['settings']['application']['base_url'],
hide_remove_pass=os.getenv("SALTED_PASS", False),
api_key=datastore.data['settings']['application'].get('api_access_token'),
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),

@@ -1110,8 +1114,8 @@
os.path.join(datastore_o.datastore_path, list_with_tags_file), "w"
) as f:
for uuid in datastore.data["watching"]:
url = datastore.data["watching"][uuid]["url"]
tag = datastore.data["watching"][uuid]["tag"]
url = datastore.data["watching"][uuid].get('url')
tag = datastore.data["watching"][uuid].get('tags', {})
f.write("{} {}\r\n".format(url, tag))

# Add it to the Zip

@@ -1199,7 +1203,7 @@

add_paused = request.form.get('edit_and_watch_submit_button') != None
processor = request.form.get('processor', 'text_json_diff')
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip(), extras={'paused': add_paused, 'processor': processor})
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras={'paused': add_paused, 'processor': processor})

if new_uuid:
if add_paused:

@@ -1264,12 +1268,14 @@
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
i = 1

elif tag != None:
elif tag:
# Items that have this current tag
for watch_uuid, watch in datastore.data['watching'].items():
if (tag != None and tag in watch['tag']):
if tag in watch.get('tags', {}):
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False}))
update_q.put(
queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False})
)
i += 1

else:

@@ -1357,6 +1363,18 @@
datastore.data['watching'][uuid.strip()]['notification_format'] = default_notification_format_for_watch
flash("{} watches set to use default notification settings".format(len(uuids)))

elif (op == 'assign-tag'):
op_extradata = request.form.get('op_extradata', '').strip()
if op_extradata:
tag_uuid = datastore.add_tag(name=op_extradata)
if op_extradata and tag_uuid:
for uuid in uuids:
uuid = uuid.strip()
if datastore.data['watching'].get(uuid):
datastore.data['watching'][uuid]['tags'].append(tag_uuid)

flash("{} watches assigned tag".format(len(uuids)))

return redirect(url_for('index'))

@app.route("/api/share-url", methods=['GET'])

@@ -1366,7 +1384,6 @@
the share-link can be imported/added"""
import requests
import json
tag = request.args.get('tag')
uuid = request.args.get('uuid')

# more for testing

@@ -1413,12 +1430,39 @@
# paste in etc
return redirect(url_for('index'))

@app.route("/highlight_submit_ignore_url", methods=['POST'])
def highlight_submit_ignore_url():
import re
mode = request.form.get('mode')
selection = request.form.get('selection')

uuid = request.args.get('uuid','')
if datastore.data["watching"].get(uuid):
if mode == 'exact':
for l in selection.splitlines():
datastore.data["watching"][uuid]['ignore_text'].append(l.strip())
elif mode == 'digit-regex':
for l in selection.splitlines():
# Replace any series of numbers with a regex
s = re.escape(l.strip())
s = re.sub(r'[0-9]+', r'\\d+', s)
datastore.data["watching"][uuid]['ignore_text'].append('/' + s + '/')

return f"<a href={url_for('preview_page', uuid=uuid)}>Click to preview</a>"


import changedetectionio.blueprint.browser_steps as browser_steps
app.register_blueprint(browser_steps.construct_blueprint(datastore), url_prefix='/browser-steps')

import changedetectionio.blueprint.price_data_follower as price_data_follower
app.register_blueprint(price_data_follower.construct_blueprint(datastore, update_q), url_prefix='/price_data_follower')

import changedetectionio.blueprint.tags as tags
app.register_blueprint(tags.construct_blueprint(datastore), url_prefix='/tags')

import changedetectionio.blueprint.check_proxies as check_proxies
app.register_blueprint(check_proxies.construct_blueprint(datastore=datastore), url_prefix='/check_proxy')


# @todo handle ctrl break
ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start()
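For reference, the `digit-regex` mode added in `highlight_submit_ignore_url` above escapes the selected line and then widens every run of digits, so the stored ignore rule keeps matching when a price or counter changes. A standalone sketch of that transform (standard library only, not part of the diff):

```python
import re

# Same two steps as the handler above: escape the literal selection,
# then replace each run of digits with the regex token \d+.
selection = "Stock: 42 items"
s = re.escape(selection.strip())
s = re.sub(r'[0-9]+', r'\\d+', s)
print('/' + s + '/')
# e.g. /Stock:\ \d+\ items/  (exact escaping of spaces depends on the Python version's re.escape)
```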
@@ -1,3 +1,6 @@
import os
from distutils.util import strtobool

from flask_expects_json import expects_json
from changedetectionio import queuedWatchMetaData
from flask_restful import abort, Resource

@@ -33,7 +36,7 @@ class Watch(Resource):
@auth.check_token
def get(self, uuid):
"""
@api {get} /api/v1/watch/:uuid Get a single watch data
@api {get} /api/v1/watch/:uuid Single watch - get data, recheck, pause, mute.
@apiDescription Retrieve watch information and set muted/paused status
@apiExample {curl} Example usage:
curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -H"x-api-key:813031b16330fe25e3780cf0325daa45"

@@ -209,7 +212,9 @@ class CreateWatch(Resource):
json_data = request.get_json()
url = json_data['url'].strip()

if not validators.url(json_data['url'].strip()):
# If hosts that only contain alphanumerics are allowed ("localhost" for example)
allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
if not validators.url(url, simple_host=allow_simplehost):
return "Invalid or unsupported URL", 400

if json_data.get('proxy'):

@@ -218,9 +223,16 @@
return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400

extras = copy.deepcopy(json_data)

# Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)
tags = None
if extras.get('tag'):
tags = extras.get('tag')
del extras['tag']

del extras['url']

new_uuid = self.datastore.add_watch(url=url, extras=extras)
new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags)
if new_uuid:
self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
return {'uuid': new_uuid}, 201

@@ -259,13 +271,16 @@
"""
list = {}

tag_limit = request.args.get('tag', None)
for k, watch in self.datastore.data['watching'].items():
if tag_limit:
if not tag_limit.lower() in watch.all_tags:
continue
tag_limit = request.args.get('tag', '').lower()

list[k] = {'url': watch['url'],

for uuid, watch in self.datastore.data['watching'].items():
# Watch tags by name (replace the other calls?)
tags = self.datastore.get_all_tags_for_watch(uuid=uuid)
if tag_limit and not any(v.get('title').lower() == tag_limit for k, v in tags.items()):
continue

list[uuid] = {'url': watch['url'],
'title': watch['title'],
'last_checked': watch['last_checked'],
'last_changed': watch.last_changed,
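A hedged client-side illustration of the backwards-compatible `tag` handling above, assuming a local instance on port 5000 and the usual `/api/v1/watch` create/list endpoint; the API key and watch UUID format follow the docstring example earlier in this file:

```python
import requests  # assumption: a locally running changedetection.io instance

BASE = "http://localhost:5000/api/v1/watch"
HEADERS = {"x-api-key": "813031b16330fe25e3780cf0325daa45"}

# The v1 API still accepts the old singular "tag" field; per the hunk above it is
# popped from the payload and forwarded to datastore.add_watch(..., tag=...).
r = requests.post(BASE, json={"url": "https://example.com", "tag": "shopping"}, headers=HEADERS)
print(r.status_code, r.json())  # expect 201 and {'uuid': '...'} on success

# Listing watches filtered by tag title still works via ?tag=<title>.
print(requests.get(BASE, params={"tag": "shopping"}, headers=HEADERS).json())
```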
changedetectionio/blueprint/check_proxies/__init__.py (new file, 118 lines)
@@ -0,0 +1,118 @@
from concurrent.futures import ThreadPoolExecutor

from functools import wraps

from flask import Blueprint
from flask_login import login_required

from changedetectionio.processors import text_json_diff
from changedetectionio.store import ChangeDetectionStore


STATUS_CHECKING = 0
STATUS_FAILED = 1
STATUS_OK = 2
THREADPOOL_MAX_WORKERS = 3
_DEFAULT_POOL = ThreadPoolExecutor(max_workers=THREADPOOL_MAX_WORKERS)


# Maybe use fetch-time if its >5 to show some expected load time?
def threadpool(f, executor=None):
    @wraps(f)
    def wrap(*args, **kwargs):
        return (executor or _DEFAULT_POOL).submit(f, *args, **kwargs)

    return wrap


def construct_blueprint(datastore: ChangeDetectionStore):
    check_proxies_blueprint = Blueprint('check_proxies', __name__)
    checks_in_progress = {}

    @threadpool
    def long_task(uuid, preferred_proxy):
        import time
        from changedetectionio import content_fetcher

        status = {'status': '', 'length': 0, 'text': ''}
        from jinja2 import Environment, BaseLoader

        contents = ''
        now = time.time()
        try:
            update_handler = text_json_diff.perform_site_check(datastore=datastore)
            changed_detected, update_obj, contents = update_handler.run(uuid, preferred_proxy=preferred_proxy, skip_when_checksum_same=False)
            # title, size is len contents not len xfer
        except content_fetcher.Non200ErrorCodeReceived as e:
            if e.status_code == 404:
                status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but 404 (page not found)"})
            elif e.status_code == 403 or e.status_code == 401:
                status.update({'status': 'ERROR', 'length': len(contents), 'text': f"{e.status_code} - Access denied"})
            else:
                status.update({'status': 'ERROR', 'length': len(contents), 'text': f"Status code: {e.status_code}"})
        except text_json_diff.FilterNotFoundInResponse:
            status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but CSS/xPath filter not found (page changed layout?)"})
        except content_fetcher.EmptyReply as e:
            if e.status_code == 403 or e.status_code == 401:
                status.update({'status': 'ERROR OTHER', 'length': len(contents), 'text': f"Got empty reply with code {e.status_code} - Access denied"})
            else:
                status.update({'status': 'ERROR OTHER', 'length': len(contents) if contents else 0, 'text': f"Empty reply with code {e.status_code}, needs chrome?"})
        except content_fetcher.ReplyWithContentButNoText as e:
            txt = f"Got reply but with no content - Status code {e.status_code} - It's possible that the filters were found, but contained no usable text (or contained only an image)."
            status.update({'status': 'ERROR', 'text': txt})
        except Exception as e:
            status.update({'status': 'ERROR OTHER', 'length': len(contents) if contents else 0, 'text': 'Error: '+type(e).__name__+str(e)})
        else:
            status.update({'status': 'OK', 'length': len(contents), 'text': ''})

        if status.get('text'):
            status['text'] = Environment(loader=BaseLoader()).from_string('{{text|e}}').render({'text': status['text']})

        status['time'] = "{:.2f}s".format(time.time() - now)

        return status

    def _recalc_check_status(uuid):

        results = {}
        for k, v in checks_in_progress.get(uuid, {}).items():
            try:
                r_1 = v.result(timeout=0.05)
            except Exception as e:
                # If timeout error?
                results[k] = {'status': 'RUNNING'}

            else:
                results[k] = r_1

        return results

    @login_required
    @check_proxies_blueprint.route("/<string:uuid>/status", methods=['GET'])
    def get_recheck_status(uuid):
        results = _recalc_check_status(uuid=uuid)
        return results

    @login_required
    @check_proxies_blueprint.route("/<string:uuid>/start", methods=['GET'])
    def start_check(uuid):

        if not datastore.proxy_list:
            return

        if checks_in_progress.get(uuid):
            state = _recalc_check_status(uuid=uuid)
            for proxy_key, v in state.items():
                if v.get('status') == 'RUNNING':
                    return state
        else:
            checks_in_progress[uuid] = {}

        for k, v in datastore.proxy_list.items():
            if not checks_in_progress[uuid].get(k):
                checks_in_progress[uuid][k] = long_task(uuid=uuid, preferred_proxy=k)

        results = _recalc_check_status(uuid=uuid)
        return results

    return check_proxies_blueprint
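The blueprint above fans each per-proxy recheck out to a small thread pool and then polls the futures with a very short timeout, so the `/status` endpoint can answer immediately with RUNNING entries. A stripped-down sketch of that pattern using only the standard library (not code from the repository):

```python
from concurrent.futures import ThreadPoolExecutor
from functools import wraps
import time

_POOL = ThreadPoolExecutor(max_workers=3)

def threadpool(f):
    # Calling the decorated function returns a Future immediately instead of blocking.
    @wraps(f)
    def wrap(*args, **kwargs):
        return _POOL.submit(f, *args, **kwargs)
    return wrap

@threadpool
def slow_check(proxy_name):
    time.sleep(2)  # stand-in for a real fetch through one proxy
    return {"status": "OK", "proxy": proxy_name}

futures = {name: slow_check(name) for name in ("proxy-a", "proxy-b")}

def poll(futures):
    # Mirrors _recalc_check_status(): a tiny result() timeout turns "not done yet" into RUNNING.
    results = {}
    for name, fut in futures.items():
        try:
            results[name] = fut.result(timeout=0.05)
        except Exception:
            results[name] = {"status": "RUNNING"}
    return results

print(poll(futures))   # both still RUNNING
time.sleep(2.5)
print(poll(futures))   # both finished with their OK status dicts
```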
changedetectionio/blueprint/tags/README.md (new file, 9 lines)
@@ -0,0 +1,9 @@
# Groups tags

## How it works

Watch has a list() of tag UUID's, which relate to a config under application.settings.tags

The 'tag' is actually a watch, because they basically will eventually share 90% of the same config.

So a tag is like an abstract of a watch
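A rough sketch of that relationship as it would sit in the datastore (only the fields visible in this changeset are real; the UUID values and anything else are illustrative): group tags live under the application settings keyed by UUID, and each watch simply carries a list of those UUIDs.

```python
# Illustrative shape only - based on the fields this changeset touches
# (settings.application.tags keyed by UUID; watch['tags'] is a list of tag UUIDs).
datastore_data = {
    "settings": {
        "application": {
            "tags": {
                "3f0a5a4e-4c7b-4d43-9e11-2a1cbb4d1b1f": {
                    "title": "shopping",
                    "notification_muted": False,
                    # ...a tag otherwise shares most of a watch's configuration fields
                },
            },
        },
    },
    "watching": {
        "cc0cfffa-f449-477b-83ea-0caafd1dc091": {
            "url": "https://example.com/product",
            "tags": ["3f0a5a4e-4c7b-4d43-9e11-2a1cbb4d1b1f"],  # reference by UUID, not by name
        },
    },
}
```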
changedetectionio/blueprint/tags/__init__.py (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
from flask import Blueprint, request, make_response, render_template, flash, url_for, redirect
|
||||
from changedetectionio.store import ChangeDetectionStore
|
||||
from changedetectionio import login_optionally_required
|
||||
|
||||
|
||||
def construct_blueprint(datastore: ChangeDetectionStore):
|
||||
tags_blueprint = Blueprint('tags', __name__, template_folder="templates")
|
||||
|
||||
@tags_blueprint.route("/list", methods=['GET'])
|
||||
@login_optionally_required
|
||||
def tags_overview_page():
|
||||
from .form import SingleTag
|
||||
add_form = SingleTag(request.form)
|
||||
        output = render_template("groups-overview.html",
                                 form=add_form,
                                 available_tags=datastore.data['settings']['application'].get('tags', {}),
                                 )

        return output

    @tags_blueprint.route("/add", methods=['POST'])
    @login_optionally_required
    def form_tag_add():
        from .form import SingleTag
        add_form = SingleTag(request.form)

        if not add_form.validate():
            for widget, l in add_form.errors.items():
                flash(','.join(l), 'error')
            return redirect(url_for('tags.tags_overview_page'))

        title = request.form.get('name').strip()

        if datastore.tag_exists_by_name(title):
            flash(f'The tag "{title}" already exists', "error")
            return redirect(url_for('tags.tags_overview_page'))

        datastore.add_tag(title)
        flash("Tag added")

        return redirect(url_for('tags.tags_overview_page'))

    @tags_blueprint.route("/mute/<string:uuid>", methods=['GET'])
    @login_optionally_required
    def mute(uuid):
        if datastore.data['settings']['application']['tags'].get(uuid):
            datastore.data['settings']['application']['tags'][uuid]['notification_muted'] = not datastore.data['settings']['application']['tags'][uuid]['notification_muted']
        return redirect(url_for('tags.tags_overview_page'))

    @tags_blueprint.route("/delete/<string:uuid>", methods=['GET'])
    @login_optionally_required
    def delete(uuid):
        removed = 0
        # Delete the tag, and any tag reference
        if datastore.data['settings']['application']['tags'].get(uuid):
            del datastore.data['settings']['application']['tags'][uuid]

        for watch_uuid, watch in datastore.data['watching'].items():
            if watch.get('tags') and uuid in watch['tags']:
                removed += 1
                watch['tags'].remove(uuid)

        flash(f"Tag deleted and removed from {removed} watches")
        return redirect(url_for('tags.tags_overview_page'))

    @tags_blueprint.route("/unlink/<string:uuid>", methods=['GET'])
    @login_optionally_required
    def unlink(uuid):
        unlinked = 0
        for watch_uuid, watch in datastore.data['watching'].items():
            if watch.get('tags') and uuid in watch['tags']:
                unlinked += 1
                watch['tags'].remove(uuid)

        flash(f"Tag unlinked and removed from {unlinked} watches")
        return redirect(url_for('tags.tags_overview_page'))

    @tags_blueprint.route("/delete_all", methods=['GET'])
    @login_optionally_required
    def delete_all():
        for watch_uuid, watch in datastore.data['watching'].items():
            watch['tags'] = []
        datastore.data['settings']['application']['tags'] = {}

        flash("All tags deleted")
        return redirect(url_for('tags.tags_overview_page'))

    @tags_blueprint.route("/edit/<string:uuid>", methods=['GET'])
    @login_optionally_required
    def form_tag_edit(uuid):
        from changedetectionio import forms

        if uuid == 'first':
            uuid = list(datastore.data['settings']['application']['tags'].keys()).pop()

        default = datastore.data['settings']['application']['tags'].get(uuid)

        form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
                               data=default,
                               )
        form.datastore = datastore  # needed?

        output = render_template("edit-tag.html",
                                 data=default,
                                 form=form,
                                 settings_application=datastore.data['settings']['application'],
                                 )

        return output

    @tags_blueprint.route("/edit/<string:uuid>", methods=['POST'])
    @login_optionally_required
    def form_tag_edit_submit(uuid):
        from changedetectionio import forms
        if uuid == 'first':
            uuid = list(datastore.data['settings']['application']['tags'].keys()).pop()

        default = datastore.data['settings']['application']['tags'].get(uuid)

        form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
                               data=default,
                               )
        # @todo subclass form so validation works
        #if not form.validate():
        #    for widget, l in form.errors.items():
        #        flash(','.join(l), 'error')
        #        return redirect(url_for('tags.form_tag_edit_submit', uuid=uuid))

        datastore.data['settings']['application']['tags'][uuid].update(form.data)
        datastore.needs_write_urgent = True
        flash("Updated")

        return redirect(url_for('tags.tags_overview_page'))

    @tags_blueprint.route("/delete/<string:uuid>", methods=['GET'])
    def form_tag_delete(uuid):
        return redirect(url_for('tags.tags_overview_page'))

    return tags_blueprint
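The routes above are built inside a factory that closes over the datastore and returns the finished Blueprint. A minimal sketch of how such a blueprint is typically wired into the Flask app follows; the construct_blueprint name and the /tags URL prefix are assumptions based on how the other blueprints in this project appear to be registered, not something confirmed by this diff.

# Hedged sketch - how a construct-style blueprint is usually registered (names assumed):
from changedetectionio.blueprint import tags

tags_blueprint = tags.construct_blueprint(datastore)        # builds the Blueprint shown above
app.register_blueprint(tags_blueprint, url_prefix='/tags')  # exposes /tags/add, /tags/edit/<uuid>, /tags/delete/<uuid>, ...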
22	changedetectionio/blueprint/tags/form.py	Normal file
@@ -0,0 +1,22 @@
from wtforms import (
    BooleanField,
    Form,
    IntegerField,
    RadioField,
    SelectField,
    StringField,
    SubmitField,
    TextAreaField,
    validators,
)


class SingleTag(Form):

    name = StringField('Tag name', [validators.InputRequired()], render_kw={"placeholder": "Name"})
    save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
135	changedetectionio/blueprint/tags/templates/edit-tag.html	Normal file
@@ -0,0 +1,135 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
{% from '_common_fields.jinja' import render_common_settings_form %}
<script>
const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
</script>

<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
<script>

/*{% if emailprefix %}*/
/*const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');*/
/*{% endif %}*/

</script>

<script src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script>
<!--<script src="{{url_for('static_content', group='js', filename='limit.js')}}" defer></script>-->
<script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>

<div class="edit-form monospaced-textarea">

<div class="tabs collapsable">
<ul>
<li class="tab" id=""><a href="#general">General</a></li>
<li class="tab"><a href="#filters-and-triggers">Filters & Triggers</a></li>
<li class="tab"><a href="#notifications">Notifications</a></li>
</ul>
</div>

<div class="box-wrap inner">
<form class="pure-form pure-form-stacked"
action="{{ url_for('tags.form_tag_edit', uuid=data.uuid) }}" method="POST">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">

<div class="tab-pane-inner" id="general">
<fieldset>
<div class="pure-control-group">
{{ render_field(form.title, placeholder="https://...", required=true, class="m-d") }}
</div>
</fieldset>
</div>

<div class="tab-pane-inner" id="filters-and-triggers">
<div class="pure-control-group">
{% set field = render_field(form.include_filters,
rows=5,
placeholder="#example
xpath://body/div/span[contains(@class, 'example-class')]",
class="m-d")
%}
{{ field }}
{% if '/text()' in field %}
<span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the <element> contains <![CDATA[]]></strong></span><br>
{% endif %}
<span class="pure-form-message-inline">One rule per line, <i>any</i> rules that matches will be used.<br>

<ul>
<li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
<li>JSON - Limit text to this JSON rule, using either <a href="https://pypi.org/project/jsonpath-ng/" target="new">JSONPath</a> or <a href="https://stedolan.github.io/jq/" target="new">jq</a> (if installed).
<ul>
<li>JSONPath: Prefix with <code>json:</code>, use <code>json:$</code> to force re-formatting if required, <a href="https://jsonpath.com/" target="new">test your JSONPath here</a>.</li>
{% if jq_support %}
<li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>.</li>
{% else %}
<li>jq support not installed</li>
{% endif %}
</ul>
</li>
<li>XPath - Limit text to this XPath rule, simply start with a forward-slash,
<ul>
<li>Example: <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
href="http://xpather.com/" target="new">test your XPath here</a></li>
<li>Example: Get all titles from an RSS feed <code>//title/text()</code></li>
</ul>
</li>
</ul>
Please be sure that you thoroughly understand how to write CSS, JSONPath, XPath{% if jq_support %}, or jq selector{%endif%} rules before filing an issue on GitHub! <a
href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br>
</span>
</div>
<fieldset class="pure-control-group">
{{ render_field(form.subtractive_selectors, rows=5, placeholder="header
footer
nav
.stockticker") }}
<span class="pure-form-message-inline">
<ul>
<li> Remove HTML element(s) by CSS selector before text conversion. </li>
<li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li>
</ul>
</span>
</fieldset>

</div>

<div class="tab-pane-inner" id="notifications">
<fieldset>
<div class="pure-control-group inline-radio">
{{ render_checkbox_field(form.notification_muted) }}
</div>
{% if is_html_webdriver %}
<div class="pure-control-group inline-radio">
{{ render_checkbox_field(form.notification_screenshot) }}
<span class="pure-form-message-inline">
<strong>Use with caution!</strong> This will easily fill up your email storage quota or flood other storages.
</span>
</div>
{% endif %}
<div class="field-group" id="notification-field-group">
{% if has_default_notification_urls %}
<div class="inline-warning">
<img class="inline-warning-icon" src="{{url_for('static_content', group='images', filename='notice.svg')}}" alt="Look out!" title="Lookout!" >
There are <a href="{{ url_for('settings_page')}}#notifications">system-wide notification URLs enabled</a>, this form will override notification settings for this watch only ‐ an empty Notification URL list here will still send notifications.
</div>
{% endif %}
<a href="#notifications" id="notification-setting-reset-to-default" class="pure-button button-xsmall" style="right: 20px; top: 20px; position: absolute; background-color: #5f42dd; border-radius: 4px; font-size: 70%; color: #fff">Use system defaults</a>

{{ render_common_settings_form(form, emailprefix, settings_application) }}
</div>
</fieldset>
</div>

<div id="actions">
<div class="pure-control-group">
{{ render_button(form.save_button) }}
</div>
</div>
</form>
</div>
</div>

{% endblock %}
@@ -0,0 +1,60 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.jinja' import render_simple_field, render_field %}
<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>

<div class="box">
<form class="pure-form" action="{{ url_for('tags.form_tag_add') }}" method="POST" id="new-watch-form">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
<fieldset>
<legend>Add a new organisational tag</legend>
<div id="watch-add-wrapper-zone">
<div>
{{ render_simple_field(form.name, placeholder="watch label / tag") }}
</div>
<div>
{{ render_simple_field(form.save_button, title="Save" ) }}
</div>
</div>
<br>
<div style="color: #fff;">Groups allows you to manage filters and notifications for multiple watches under a single organisational tag.</div>
</fieldset>
</form>
<!-- @todo maybe some overview matrix, 'tick' with which has notification, filter rules etc -->
<div id="watch-table-wrapper">

<table class="pure-table pure-table-striped watch-table group-overview-table">
<thead>
<tr>
<th></th>
<th>Tag / Label name</th>
<th></th>
</tr>
</thead>
<tbody>
<!--
@Todo - connect Last checked, Last Changed, Number of Watches etc
--->
{% if not available_tags|length %}
<tr>
<td colspan="3">No website organisational tags/groups configured</td>
</tr>
{% endif %}
{% for uuid, tag in available_tags.items() %}
<tr id="{{ uuid }}" class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }}">
<td class="watch-controls">
<a class="link-mute state-{{'on' if tag.notification_muted else 'off'}}" href="{{url_for('tags.mute', uuid=tag.uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications" class="icon icon-mute" ></a>
</td>
<td class="title-col inline">{{tag.title}}</td>
<td>
<a class="pure-button pure-button-primary" href="{{ url_for('tags.form_tag_edit', uuid=uuid) }}">Edit</a>
<a class="pure-button pure-button-primary" href="{{ url_for('tags.delete', uuid=uuid) }}" title="Deletes and removes tag">Delete</a>
<a class="pure-button pure-button-primary" href="{{ url_for('tags.unlink', uuid=uuid) }}" title="Keep the tag but unlink any watches">Unlink</a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
{% endblock %}
@@ -1,12 +1,15 @@
import hashlib
from abc import abstractmethod
from distutils.util import strtobool
from urllib.parse import urlparse
import chardet
import hashlib
import json
import logging
import os
import requests
import sys
import time
import urllib.parse

visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4, header, footer, section, article, aside, details, main, nav, section, summary'

@@ -77,11 +80,13 @@ class ScreenshotUnavailable(Exception):


class ReplyWithContentButNoText(Exception):
    def __init__(self, status_code, url, screenshot=None):
    def __init__(self, status_code, url, screenshot=None, has_filters=False, html_content=''):
        # Set this so we can use it in other parts of the app
        self.status_code = status_code
        self.url = url
        self.screenshot = screenshot
        self.has_filters = has_filters
        self.html_content = html_content
        return

@@ -201,7 +206,8 @@ class Fetcher():
        dest = os.path.join(self.browser_steps_screenshot_path, 'step_*.jpeg')
        files = glob.glob(dest)
        for f in files:
            os.unlink(f)
            if os.path.isfile(f):
                os.unlink(f)

    # Maybe for the future, each fetcher provides its own diff output, could be used for text, image
@@ -263,7 +269,6 @@ class base_html_playwright(Fetcher):

        if self.proxy:
            # Playwright needs separate username and password values
            from urllib.parse import urlparse
            parsed = urlparse(self.proxy.get('server'))
            if parsed.username:
                self.proxy['username'] = parsed.username
@@ -318,14 +323,13 @@ class base_html_playwright(Fetcher):

        # Append proxy connect string
        if self.proxy:
            import urllib.parse
            # Remove username/password if it exists in the URL or you will receive "ERR_NO_SUPPORTED_PROXIES" error
            # Actual authentication handled by Puppeteer/node
            o = urlparse(self.proxy.get('server'))
            proxy_url = urllib.parse.quote(o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl())
            # Remove scheme, socks5:// doesnt always work and it will autodetect anyway
            proxy_url = urllib.parse.quote(o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl().replace(f"{o.scheme}://", '', 1))
            browserless_function_url = f"{browserless_function_url}&--proxy-server={proxy_url}&dumpio=true"
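To make the scheme and credential stripping above concrete, here is a rough, hypothetical walk-through of what those two urllib.parse lines produce for a credentialed SOCKS5 proxy URL; the URL itself is made up and not taken from this diff.

from urllib.parse import urlparse, quote

o = urlparse('socks5://user:secret@proxy.example.com:1080')
no_auth = o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl()   # 'socks5://proxy.example.com:1080'
no_scheme = no_auth.replace(f"{o.scheme}://", '', 1)                       # 'proxy.example.com:1080'
print(quote(no_scheme))  # 'proxy.example.com%3A1080' - safe to append as &--proxy-server=...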

        try:
            amp = '&' if '?' in browserless_function_url else '?'
            response = requests.request(
@@ -342,9 +346,9 @@ class base_html_playwright(Fetcher):
                    'req_headers': request_headers,
                    'screenshot_quality': int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)),
                    'url': url,
                    'user_agent': request_headers.get('User-Agent', 'Mozilla/5.0'),
                    'proxy_username': self.proxy.get('username','') if self.proxy else False,
                    'proxy_password': self.proxy.get('password', '') if self.proxy else False,
                    'user_agent': {k.lower(): v for k, v in request_headers.items()}.get('user-agent', None),
                    'proxy_username': self.proxy.get('username', '') if self.proxy else False,
                    'proxy_password': self.proxy.get('password', '') if self.proxy and self.proxy.get('username') else False,
                    'no_cache_list': [
                        'twitter',
                        '.pdf'
@@ -413,8 +417,8 @@ class base_html_playwright(Fetcher):
            lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
            self.browser_steps))

        if not has_browser_steps:
            if os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH'):
        if not has_browser_steps and os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH'):
            if strtobool(os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH')):
                # Temporary backup solution until we rewrite the playwright code
                return self.run_fetch_browserless_puppeteer(
                    url,
@@ -431,6 +435,7 @@ class base_html_playwright(Fetcher):

        self.delete_browser_steps_screenshots()
        response = None

        with sync_playwright() as p:
            browser_type = getattr(p, self.browser_type)

@@ -439,10 +444,13 @@ class base_html_playwright(Fetcher):
            # 60,000 connection timeout only
            browser = browser_type.connect_over_cdp(self.command_executor, timeout=60000)

            # SOCKS5 with authentication is not supported (yet)
            # https://github.com/microsoft/playwright/issues/10567

            # Set user agent to prevent Cloudflare from blocking the browser
            # Use the default one configured in the App.py model that's passed from fetch_site_status.py
            context = browser.new_context(
                user_agent=request_headers.get('User-Agent', 'Mozilla/5.0'),
                user_agent={k.lower(): v for k, v in request_headers.items()}.get('user-agent', None),
                proxy=self.proxy,
                # This is needed to enable JavaScript execution on GitHub and others
                bypass_csp=True,
@@ -475,7 +483,6 @@ class base_html_playwright(Fetcher):
                    print("Content Fetcher > retrying request got error - ", str(e))
                    time.sleep(1)
                    response = self.page.goto(url, wait_until='commit')

            except Exception as e:
                print("Content Fetcher > Other exception when page.goto", str(e))
                context.close()
@@ -594,6 +601,24 @@ class base_html_webdriver(Fetcher):
            proxy_args['httpProxy'] = proxy_override

        if proxy_args:

            # Convert socks5:// to selenium SOCKS5 correct config
            o = urlparse(proxy_args.get('httpProxy', ''))
            # https://github.com/tebeka/selenium/issues/167
            # https://github.com/SeleniumHQ/selenium/issues/6917
            if o.scheme == 'socks5':
                if proxy_args.get('httpProxy'):
                    del(proxy_args['httpProxy'])
                path = "/" + o.path if o.path else ''
                if not o.port:
                    raise Exception("No port number specified in this proxy config URL")
                proxy_args['socksProxy'] = f"{o.hostname}:{o.port}{path}"
                proxy_args['socksVersion'] = 5
                proxy_args['proxyType'] = 'MANUAL'
                if o.username:
                    proxy_args['socksUsername'] = o.username
                    proxy_args['socksPassword'] = o.password

            self.proxy = SeleniumProxy(raw=proxy_args)
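As a concrete but made-up illustration of the conversion above, a credentialed socks5:// entry ends up expressed as the following Selenium proxy capability keys:

from urllib.parse import urlparse

o = urlparse('socks5://proxy_user123:proxy_pass123@socks5proxy:1080')
proxy_args = {
    'proxyType': 'MANUAL',
    'socksProxy': f"{o.hostname}:{o.port}",   # 'socks5proxy:1080'
    'socksVersion': 5,
    'socksUsername': o.username,              # 'proxy_user123'
    'socksPassword': o.password,              # 'proxy_pass123'
}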

    def run(self,
@@ -611,7 +636,6 @@ class base_html_webdriver(Fetcher):
        from selenium.common.exceptions import WebDriverException
        # request_body, request_method unused for now, until some magic in the future happens.

        # check env for WEBDRIVER_URL
        self.driver = webdriver.Remote(
            command_executor=self.command_executor,
            desired_capabilities=DesiredCapabilities.CHROME,
@@ -683,13 +707,17 @@ class html_requests(Fetcher):
            is_binary=False):

        # Make requests use a more modern looking user-agent
        if not 'User-Agent' in request_headers:
        if not {k.lower(): v for k, v in request_headers.items()}.get('user-agent', None):
            request_headers['User-Agent'] = os.getenv("DEFAULT_SETTINGS_HEADERS_USERAGENT",
                                                      'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36')
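The dict-comprehension lookup replaces an exact-key check so the header is found regardless of how the caller capitalised it. A tiny, self-contained illustration with made-up header values:

request_headers = {'user-agent': 'MyBot/1.0', 'Accept': 'text/html'}

ua = {k.lower(): v for k, v in request_headers.items()}.get('user-agent', None)
print(ua)                                  # 'MyBot/1.0' - matched even though the key is lowercase
print(request_headers.get('User-Agent'))   # None - the old exact-key lookup would have missed it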

        proxies = {}

        # Allows override the proxy on a per-request basis

        # https://requests.readthedocs.io/en/latest/user/advanced/#socks
        # Should also work with `socks5://user:pass@host:port` type syntax.

        if self.proxy_override:
            proxies = {'http': self.proxy_override, 'https': self.proxy_override, 'ftp': self.proxy_override}
        else:

@@ -35,15 +35,19 @@ def customSequenceMatcher(before, after, include_equal=False, include_removed=Tr

# only_differences - only return info about the differences, no context
# line_feed_sep could be "<br>" or "<li>" or "\n" etc
def render_diff(previous_version_file_contents, newest_version_file_contents, include_equal=False, include_removed=True, include_added=True, include_replaced=True, line_feed_sep="\n", include_change_type_prefix=True):
def render_diff(previous_version_file_contents, newest_version_file_contents, include_equal=False, include_removed=True, include_added=True, include_replaced=True, line_feed_sep="\n", include_change_type_prefix=True, patch_format=False):

    newest_version_file_contents = [line.rstrip() for line in newest_version_file_contents.splitlines()]

    if previous_version_file_contents:
        previous_version_file_contents = [line.rstrip() for line in previous_version_file_contents.splitlines()]
        previous_version_file_contents = [line.rstrip() for line in previous_version_file_contents.splitlines()]
    else:
        previous_version_file_contents = ""

    if patch_format:
        patch = difflib.unified_diff(previous_version_file_contents, newest_version_file_contents)
        return line_feed_sep.join(patch)

    rendered_diff = customSequenceMatcher(before=previous_version_file_contents,
                                          after=newest_version_file_contents,
                                          include_equal=include_equal,
@@ -54,4 +58,5 @@ def render_diff(previous_version_file_contents, newest_version_file_contents, in

    # Recursively join lists
    f = lambda L: line_feed_sep.join([f(x) if type(x) is list else x for x in L])
    return f(rendered_diff)
    p = f(rendered_diff)
    return p
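A hedged usage sketch of the new patch_format switch, assuming this module stays importable as changedetectionio.diff; the before/after strings are invented and the printed output is only shown approximately.

from changedetectionio import diff

before = "price: 10\nin stock"
after = "price: 12\nin stock"

print(diff.render_diff(before, after, patch_format=True, line_feed_sep="\n"))
# Roughly a difflib unified diff:
#   @@ -1,2 +1,2 @@
#   -price: 10
#   +price: 12
#    in stock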

@@ -1,5 +1,6 @@
import os
import re
from distutils.util import strtobool

from wtforms import (
    BooleanField,
@@ -28,6 +29,8 @@ from changedetectionio.notification import (

from wtforms.fields import FormField

dictfilt = lambda x, y: dict([ (i,x[i]) for i in x if i in set(y) ])

valid_method = {
    'GET',
    'POST',
@@ -90,6 +93,29 @@ class SaltyPasswordField(StringField):
        else:
            self.data = False

class StringTagUUID(StringField):

    # process_formdata(self, valuelist) handled manually in POST handler

    # Is what is shown when field <input> is rendered
    def _value(self):
        # Tag UUID to name, on submit it will convert it back (in the submit handler of init.py)
        if self.data and type(self.data) is list:
            tag_titles = []
            for i in self.data:
                tag = self.datastore.data['settings']['application']['tags'].get(i)
                if tag:
                    tag_title = tag.get('title')
                    if tag_title:
                        tag_titles.append(tag_title)

            return ', '.join(tag_titles)

        if not self.data:
            return ''

        return 'error'
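In other words, the field stores a list of tag UUIDs but renders their human-readable titles. A small hypothetical walk-through, with invented IDs and titles and the datastore reduced to a plain dict:

tags_in_datastore = {
    'a1b2c3d4': {'title': 'Shoes'},
    'e5f6a7b8': {'title': 'Electronics'},
}
field_data = ['a1b2c3d4', 'e5f6a7b8']   # a watch stores tag UUIDs, not names

rendered = ', '.join(tags_in_datastore[u]['title'] for u in field_data if u in tags_in_datastore)
print(rendered)   # 'Shoes, Electronics' - what the form field displays; the POST handler converts names back to UUIDs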

class TimeBetweenCheckForm(Form):
    weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
    days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
@@ -203,16 +229,19 @@ class ValidateJinja2Template(object):
    def __call__(self, form, field):
        from changedetectionio import notification

        from jinja2 import Environment, BaseLoader, TemplateSyntaxError
        from jinja2 import Environment, BaseLoader, TemplateSyntaxError, UndefinedError
        from jinja2.meta import find_undeclared_variables


        try:
            jinja2_env = Environment(loader=BaseLoader)
            jinja2_env.globals.update(notification.valid_tokens)

            rendered = jinja2_env.from_string(field.data).render()
        except TemplateSyntaxError as e:
            raise ValidationError(f"This is not a valid Jinja2 template: {e}") from e
        except UndefinedError as e:
            raise ValidationError(f"A variable or function is not defined: {e}") from e

        ast = jinja2_env.parse(field.data)
        undefined = ", ".join(find_undeclared_variables(ast))
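The validator above renders the template once to surface syntax and undefined-name errors, then walks the parsed AST to list unknown tokens. A standalone sketch of the same jinja2 calls; the template string and the stand-in token table are made up:

from jinja2 import Environment, BaseLoader
from jinja2.meta import find_undeclared_variables

valid_tokens = {'watch_url': '', 'diff': ''}   # stand-in for notification.valid_tokens
env = Environment(loader=BaseLoader)
env.globals.update(valid_tokens)

template = "{{ watch_url }} changed: {{ mystery_token }}"
env.from_string(template).render()    # renders fine; unknown names default to empty output
ast = env.parse(template)
print(find_undeclared_variables(ast))  # includes 'mystery_token' and 'watch_url' (undeclared at parse time)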

@@ -232,9 +261,10 @@ class validateURL(object):

    def __call__(self, form, field):
        import validators

        # If hosts that only contain alphanumerics are allowed ("localhost" for example)
        allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
        try:
            validators.url(field.data.strip())
            validators.url(field.data.strip(), simple_host=allow_simplehost)
        except validators.ValidationFailure:
            message = field.gettext('\'%s\' is not a valid URL.' % (field.data.strip()))
            raise ValidationError(message)
@@ -347,7 +377,7 @@ class quickWatchForm(Form):
    from . import processors

    url = fields.URLField('URL', validators=[validateURL()])
    tag = StringField('Group tag', [validators.Optional()])
    tags = StringTagUUID('Group tag', [validators.Optional()])
    watch_submit_button = SubmitField('Watch', render_kw={"class": "pure-button pure-button-primary"})
    processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff")
    edit_and_watch_submit_button = SubmitField('Edit > Watch', render_kw={"class": "pure-button pure-button-primary"})
@@ -355,6 +385,7 @@ class quickWatchForm(Form):

# Common to a single watch and the global settings
class commonSettingsForm(Form):

    notification_urls = StringListField('Notification URL List', validators=[validators.Optional(), ValidateAppRiseServers()])
    notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()])
    notification_body = TextAreaField('Notification Body', default='{{ watch_url }} had a change.', validators=[validators.Optional(), ValidateJinja2Template()])
@@ -382,7 +413,7 @@ class SingleBrowserStep(Form):
class watchForm(commonSettingsForm):

    url = fields.URLField('URL', validators=[validateURL()])
    tag = StringField('Group tag', [validators.Optional()], default='')
    tags = StringTagUUID('Group tag', [validators.Optional()], default='')

    time_between_check = FormField(TimeBetweenCheckForm)

@@ -450,7 +481,7 @@ class SingleExtraProxy(Form):

    # maybe better to set some <script>var..
    proxy_name = StringField('Name', [validators.Optional()], render_kw={"placeholder": "Name"})
    proxy_url = StringField('Proxy URL', [validators.Optional()], render_kw={"placeholder": "http://user:pass@...:3128", "size":50})
    proxy_url = StringField('Proxy URL', [validators.Optional()], render_kw={"placeholder": "socks5:// or regular proxy http://user:pass@...:3128", "size":50})
    # @todo do the validation here instead

    # datastore.data['settings']['requests']..
@@ -474,7 +505,10 @@ class globalSettingsRequestForm(Form):
class globalSettingsApplicationForm(commonSettingsForm):

    api_access_token_enabled = BooleanField('API access token security check enabled', default=True, validators=[validators.Optional()])
    base_url = StringField('Base URL', validators=[validators.Optional()])
    base_url = StringField('Notification base URL override',
                           validators=[validators.Optional()],
                           render_kw={"placeholder": os.getenv('BASE_URL', 'Not set')}
                           )
    empty_pages_are_a_change = BooleanField('Treat empty pages as a change?', default=False)
    fetch_backend = RadioField('Fetch Method', default="html_requests", choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
    global_ignore_text = StringListField('Ignore Text', [ValidateListRegex()])

@@ -7,17 +7,35 @@ from typing import List
import json
import re


# HTML added to be sure each result matching a filter (.example) gets converted to a new line by Inscriptis
TEXT_FILTER_LIST_LINE_SUFFIX = "<br>"

PERL_STYLE_REGEX = r'^/(.*?)/([a-z]*)?$'
# 'price' , 'lowPrice', 'highPrice' are usually under here
# all of those may or may not appear on different websites
LD_JSON_PRODUCT_OFFER_SELECTOR = "json:$..offers"
# All of those may or may not appear on different websites - I didn't find a way to do case-insensitive searching here
LD_JSON_PRODUCT_OFFER_SELECTORS = ["json:$..offers", "json:$..Offers"]

class JSONNotFound(ValueError):
    def __init__(self, msg):
        ValueError.__init__(self, msg)



# Doesn't look like python supports forward slash auto enclosure in re.findall
# So convert it to inline flag "(?i)foobar" type configuration
def perl_style_slash_enclosed_regex_to_options(regex):

    res = re.search(PERL_STYLE_REGEX, regex, re.IGNORECASE)

    if res:
        flags = res.group(2) if res.group(2) else 'i'
        regex = f"(?{flags}){res.group(1)}"
    else:
        # Fall back to just ignorecase as an option
        regex = f"(?i){regex}"

    return regex
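Concretely, this helper turns a Perl-style /pattern/flags rule into an inline-flag Python regex and defaults to case-insensitive matching. A few made-up inputs and what the function returns for them:

print(perl_style_slash_enclosed_regex_to_options('/sold out/i'))   # '(?i)sold out'
print(perl_style_slash_enclosed_regex_to_options('/In Stock/'))    # '(?i)In Stock'  (no flags given, 'i' assumed)
print(perl_style_slash_enclosed_regex_to_options('price'))         # '(?i)price'     (not slash-enclosed, ignorecase fallback)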

# Given a CSS Rule, and a blob of HTML, return the blob of HTML that matches
def include_filters(include_filters, html_content, append_pretty_line_formatting=False):
    soup = BeautifulSoup(html_content, "html.parser")
@@ -144,7 +162,6 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None

    # Foreach <script json></script> blob.. just return the first that matches json_filter
    # As a last resort, try to parse the whole <body>
    s = []
    soup = BeautifulSoup(content, 'html.parser')

    if ensure_is_ldjson_info_type:
@@ -170,13 +187,24 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None

    for json_data in bs_jsons:
        stripped_text_from_html = _parse_json(json_data, json_filter)

        if ensure_is_ldjson_info_type:
            # Could sometimes be list, string or something else random
            if isinstance(json_data, dict):
                # If it has LD JSON 'key' @type, and @type is 'product', and something was found for the search
                # (Some sites have multiple of the same ld+json @type='product', but some have the review part, some have the 'price' part)
                if json_data.get('@type', False) and json_data.get('@type','').lower() == ensure_is_ldjson_info_type.lower() and stripped_text_from_html:
                    break
                # @type could also be a list (Product, SubType)
                # LD_JSON auto-extract also requires some content PLUS the ldjson to be present
                # 1833 - could be either str or dict, should not be anything else
                if json_data.get('@type') and stripped_text_from_html:
                    try:
                        if json_data.get('@type') == str or json_data.get('@type') == dict:
                            types = [json_data.get('@type')] if isinstance(json_data.get('@type'), str) else json_data.get('@type')
                            if ensure_is_ldjson_info_type.lower() in [x.lower().strip() for x in types]:
                                break
                    except:
                        continue

        elif stripped_text_from_html:
            break
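The @type normalisation above exists because ld+json blobs publish @type either as a single string or as a list. A made-up sample of the two shapes the check has to accept:

ldjson_single = {'@type': 'Product', 'offers': {'price': '19.99'}}
ldjson_multi = {'@type': ['Product', 'Vehicle'], 'offers': {'price': '49999'}}

for json_data in (ldjson_single, ldjson_multi):
    raw = json_data.get('@type')
    types = [raw] if isinstance(raw, str) else raw            # normalise to a list either way
    print('product' in [x.lower().strip() for x in types])    # True, True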

@@ -191,42 +219,41 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None
#
# wordlist - list of regex's (str) or words (str)
def strip_ignore_text(content, wordlist, mode="content"):
    ignore = []
    ignore_regex = []

    # @todo check this runs case insensitive
    for k in wordlist:

        # Is it a regex?
        if k[0] == '/':
            ignore_regex.append(k.strip(" /"))
        else:
            ignore.append(k)

    i = 0
    output = []
    ignore_text = []
    ignore_regex = []
    ignored_line_numbers = []

    for k in wordlist:
        # Is it a regex?
        res = re.search(PERL_STYLE_REGEX, k, re.IGNORECASE)
        if res:
            ignore_regex.append(re.compile(perl_style_slash_enclosed_regex_to_options(k)))
        else:
            ignore_text.append(k.strip())

    for line in content.splitlines():
        i += 1
        # Always ignore blank lines in this mode. (when this function gets called)
        got_match = False
        if len(line.strip()):
            regex_matches = False
            for l in ignore_text:
                if l.lower() in line.lower():
                    got_match = True

            # if any of these match, skip
            for regex in ignore_regex:
                try:
                    if re.search(regex, line, re.IGNORECASE):
                        regex_matches = True
                except Exception as e:
                    continue
            if not got_match:
                for r in ignore_regex:
                    if r.search(line):
                        got_match = True

        if not regex_matches and not any(skip_text.lower() in line.lower() for skip_text in ignore):
        if not got_match:
            # Not ignored
            output.append(line.encode('utf8'))
        else:
            ignored_line_numbers.append(i)



    # Used for finding out what to highlight
    if mode == "line numbers":
        return ignored_line_numbers
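A hedged usage sketch of the rewritten strip_ignore_text: plain words match case-insensitively as substrings, /slash/ rules are compiled as regexes, and mode="line numbers" reports which lines were suppressed. The sample text and rules are invented, and it is assumed the default mode returns the surviving lines as bytes.

content = "Total price: 10 EUR\nAd banner here\nDelivery: 2 days"
wordlist = ['ad banner', r'/delivery:\s+\d+ days/']

print(strip_ignore_text(content, wordlist))
# [b'Total price: 10 EUR']             <- remaining content, ignore rules removed
print(strip_ignore_text(content, wordlist, mode="line numbers"))
# [2, 3]                               <- 1-based line numbers that were ignored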

@@ -267,9 +294,18 @@ def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:

# Does LD+JSON exist with a @type=='product' and a .price set anywhere?
def has_ldjson_product_info(content):
    pricing_data = ''

    try:
        pricing_data = extract_json_as_string(content=content, json_filter=LD_JSON_PRODUCT_OFFER_SELECTOR, ensure_is_ldjson_info_type="product")
    except JSONNotFound as e:
        if not 'application/ld+json' in content:
            return False

        for filter in LD_JSON_PRODUCT_OFFER_SELECTORS:
            pricing_data += extract_json_as_string(content=content,
                                                   json_filter=filter,
                                                   ensure_is_ldjson_info_type="product")

    except Exception as e:
        # Totally fine
        return False
    x = bool(pricing_data)

@@ -85,7 +85,8 @@ class import_distill_io_json(Importer):
        now = time.time()
        self.new_uuids=[]


        # @todo Use JSONSchema like in the API to validate here.

        try:
            data = json.loads(data.strip())
        except json.decoder.JSONDecodeError:
@@ -120,11 +121,8 @@ class import_distill_io_json(Importer):
            except IndexError:
                pass


            if d.get('tags', False):
                extras['tag'] = ", ".join(d['tags'])

            new_uuid = datastore.add_watch(url=d['uri'].strip(),
                                           tag=",".join(d.get('tags', [])),
                                           extras=extras,
                                           write_to_disk_now=False)


@@ -43,6 +43,7 @@ class model(dict):
            'schema_version' : 0,
            'shared_diff_access': False,
            'webdriver_delay': None , # Extra delay in seconds before extracting text
            'tags': {} #@todo use Tag.model initialisers
        }
    }
}

19	changedetectionio/model/Tag.py	Normal file
@@ -0,0 +1,19 @@
from .Watch import base_config
import uuid

class model(dict):

    def __init__(self, *arg, **kw):

        self.update(base_config)

        self['uuid'] = str(uuid.uuid4())

        if kw.get('default'):
            self.update(kw['default'])
            del kw['default']


        # Goes at the end so we update the default object with the initialiser
        super(model, self).__init__(*arg, **kw)
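A small, hypothetical sketch of how this Tag model behaves: it starts from the shared watch base_config, always receives a fresh uuid, and an optional default dict overlays those values. The import path is an assumption based on the file location changedetectionio/model/Tag.py.

from changedetectionio.model import Tag

tag = Tag.model(default={'title': 'Shoes', 'notification_muted': True})
print(tag['uuid'])                    # freshly generated uuid4 string
print(tag.get('title'))               # 'Shoes'
print(tag.get('notification_muted'))  # True, overriding the base_config default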

@@ -52,7 +52,8 @@ base_config = {
    'previous_md5_before_filters': False, # Used for skipping changedetection entirely
    'proxy': None, # Preferred proxy connection
    'subtractive_selectors': [],
    'tag': None,
    'tag': '', # Old system of text name for a tag, to be removed
    'tags': [], # list of UUIDs to App.Tags
    'text_should_not_be_present': [], # Text that should not be present
    # Re #110, so then if this is set to None, we know to use the default value instead
    # Requires setting to None on submit if it's the same as the default
@@ -455,10 +456,6 @@ class model(dict):

        return csv_output_filename

    @property
    # Return list of tags, stripped and lowercase, used for searching
    def all_tags(self):
        return [s.strip().lower() for s in self.get('tag','').split(',')]

    def has_special_diff_filter_options_set(self):

@@ -473,40 +470,6 @@ class model(dict):
        # None is set
        return False

    @property
    def has_extra_headers_file(self):
        if os.path.isfile(os.path.join(self.watch_data_dir, 'headers.txt')):
            return True

        for f in self.all_tags:
            fname = "headers-"+re.sub(r'[\W_]', '', f).lower().strip() + ".txt"
            filepath = os.path.join(self.__datastore_path, fname)
            if os.path.isfile(filepath):
                return True

        return False

    def get_all_headers(self):
        from .App import parse_headers_from_text_file
        headers = self.get('headers', {}).copy()
        # Available headers on the disk could 'headers.txt' in the watch data dir
        filepath = os.path.join(self.watch_data_dir, 'headers.txt')
        try:
            if os.path.isfile(filepath):
                headers.update(parse_headers_from_text_file(filepath))
        except Exception as e:
            print(f"ERROR reading headers.txt at {filepath}", str(e))

        # Or each by tag, as tagname.txt in the main datadir
        for f in self.all_tags:
            fname = "headers-"+re.sub(r'[\W_]', '', f).lower().strip() + ".txt"
            filepath = os.path.join(self.__datastore_path, fname)
            try:
                if os.path.isfile(filepath):
                    headers.update(parse_headers_from_text_file(filepath))
            except Exception as e:
                print(f"ERROR reading headers.txt at {filepath}", str(e))
        return headers

    def get_last_fetched_before_filters(self):
        import brotli

@@ -9,6 +9,7 @@ valid_tokens = {
    'diff': '',
    'diff_added': '',
    'diff_full': '',
    'diff_patch': '',
    'diff_removed': '',
    'diff_url': '',
    'preview_url': '',
@@ -92,7 +93,13 @@ def process_notification(n_object, datastore):
        n_object.get('notification_format', default_notification_format),
        valid_notification_formats[default_notification_format],
    )


    # If we arrived with 'System default' then look it up
    if n_format == default_notification_format_for_watch and datastore.data['settings']['application'].get('notification_format') != default_notification_format_for_watch:
        # Initially text or whatever
        n_format = datastore.data['settings']['application'].get('notification_format', valid_notification_formats[default_notification_format])


    # https://github.com/caronc/apprise/wiki/Development_LogCapture
    # Anything higher than or equal to WARNING (which covers things like Connection errors)
    # raise it as an exception
@@ -145,9 +152,12 @@ def process_notification(n_object, datastore):
        # Apprise will default to HTML, so we need to override it
        # So that what's generated in n_body is in line with what is going to be sent.
        # https://github.com/caronc/apprise/issues/633#issuecomment-1191449321
        if not 'format=' in url and (n_format == 'text' or n_format == 'markdown'):
        if not 'format=' in url and (n_format == 'Text' or n_format == 'Markdown'):
            prefix = '?' if not '?' in url else '&'
            # Apprise format is lowercase text https://github.com/caronc/apprise/issues/633
            n_format = n_format.lower()
            url = "{}{}format={}".format(url, prefix, n_format)
            # If n_format == HTML, then apprise email should default to text/html and we should be sending HTML only

        apobj.add(url)
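Why the format= suffix matters: Apprise otherwise assumes HTML, so for Text or Markdown notifications the body format is forced through the URL query string. A rough illustration with an invented ntfy-style URL; the service URL is not from this diff:

url = 'ntfys://ntfy.example.com/my-topic'
n_format = 'Markdown'

if not 'format=' in url and (n_format == 'Text' or n_format == 'Markdown'):
    prefix = '?' if not '?' in url else '&'
    url = "{}{}format={}".format(url, prefix, n_format.lower())

print(url)   # 'ntfys://ntfy.example.com/my-topic?format=markdown'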

@@ -168,7 +178,7 @@ def process_notification(n_object, datastore):
        log_value = logs.getvalue()
        if log_value and 'WARNING' in log_value or 'ERROR' in log_value:
            raise Exception(log_value)


        sent_objs.append({'title': n_title,
                          'body': n_body,
                          'url' : url,
@@ -186,22 +196,23 @@ def create_notification_parameters(n_object, datastore):
    uuid = n_object['uuid'] if 'uuid' in n_object else ''

    if uuid != '':
        watch_title = datastore.data['watching'][uuid]['title']
        watch_tag = datastore.data['watching'][uuid]['tag']
        watch_title = datastore.data['watching'][uuid].get('title', '')
        tag_list = []
        tags = datastore.get_all_tags_for_watch(uuid)
        if tags:
            for tag_uuid, tag in tags.items():
                tag_list.append(tag.get('title'))
        watch_tag = ', '.join(tag_list)
    else:
        watch_title = 'Change Detection'
        watch_tag = ''

    # Create URLs to customise the notification with
    base_url = datastore.data['settings']['application']['base_url']
    # active_base_url - set in store.py data property
    base_url = datastore.data['settings']['application'].get('active_base_url')

    watch_url = n_object['watch_url']

    # Re #148 - Some people have just {{ base_url }} in the body or title, but this may break some notification services
    # like 'Join', so it's always best to at least set something obvious so that they are not broken.
    if base_url == '':
        base_url = "<base-url-env-var-not-set>"

    diff_url = "{}/diff/{}".format(base_url, uuid)
    preview_url = "{}/preview/{}".format(base_url, uuid)

@@ -211,11 +222,12 @@ def create_notification_parameters(n_object, datastore):
    # Valid_tokens also used as a field validator
    tokens.update(
        {
            'base_url': base_url if base_url is not None else '',
            'base_url': base_url,
            'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else '',
            'diff': n_object.get('diff', ''),  # Null default in the case we use a test
            'diff_added': n_object.get('diff_added', ''),  # Null default in the case we use a test
            'diff_full': n_object.get('diff_full', ''),  # Null default in the case we use a test
            'diff_patch': n_object.get('diff_patch', ''),  # Null default in the case we use a test
            'diff_removed': n_object.get('diff_removed', ''),  # Null default in the case we use a test
            'diff_url': diff_url,
            'preview_url': preview_url,
@@ -9,7 +9,7 @@ class difference_detection_processor():
        super().__init__(*args, **kwargs)

    @abstractmethod
    def run(self, uuid, skip_when_checksum_same=True):
    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
        update_obj = {'last_notification_error': False, 'last_error': False}
        some_data = 'xxxxx'
        update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest()

@@ -42,11 +42,10 @@ class perform_site_check(difference_detection_processor):

        # Unset any existing notification error
        update_obj = {'last_notification_error': False, 'last_error': False}
        extra_headers = watch.get('headers', [])

        # Tweak the base config with the per-watch ones
        request_headers = deepcopy(self.datastore.data['settings']['headers'])
        request_headers.update(extra_headers)
        request_headers = watch.get('headers', [])
        request_headers.update(self.datastore.get_all_base_headers())
        request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=uuid))

        # https://github.com/psf/requests/issues/4525
        # Requests doesnt yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot

@@ -11,17 +11,19 @@ from changedetectionio import content_fetcher, html_tools
from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
from copy import deepcopy
from . import difference_detection_processor
from ..html_tools import PERL_STYLE_REGEX

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


name = 'Webpage Text/HTML, JSON and PDF changes'
name = 'Webpage Text/HTML, JSON and PDF changes'
description = 'Detects all text changes where possible'
json_filter_prefixes = ['json:', 'jq:']

class FilterNotFoundInResponse(ValueError):
    def __init__(self, msg):
        ValueError.__init__(self, msg)


class PDFToHTMLToolNotFound(ValueError):
    def __init__(self, msg):
        ValueError.__init__(self, msg)
@@ -37,27 +39,13 @@ class perform_site_check(difference_detection_processor):
        super().__init__(*args, **kwargs)
        self.datastore = datastore

    # Doesn't look like python supports forward slash auto enclosure in re.findall
    # So convert it to inline flag "foobar(?i)" type configuration
    def forward_slash_enclosed_regex_to_options(self, regex):
        res = re.search(r'^/(.*?)/(\w+)$', regex, re.IGNORECASE)

        if res:
            regex = res.group(1)
            regex += '(?{})'.format(res.group(2))
        else:
            regex += '(?{})'.format('i')

        return regex

    def run(self, uuid, skip_when_checksum_same=True):
    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
        changed_detected = False
        screenshot = False  # as bytes
        stripped_text_from_html = ""

        # DeepCopy so we can be sure we don't accidently change anything by reference
        watch = deepcopy(self.datastore.data['watching'].get(uuid))

        if not watch:
            raise Exception("Watch no longer exists.")

@@ -71,9 +59,9 @@ class perform_site_check(difference_detection_processor):
        update_obj = {'last_notification_error': False, 'last_error': False}

        # Tweak the base config with the per-watch ones
        extra_headers = watch.get_all_headers()
        request_headers = self.datastore.get_all_headers()
        request_headers.update(extra_headers)
        request_headers = watch.get('headers', [])
        request_headers.update(self.datastore.get_all_base_headers())
        request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=uuid))

        # https://github.com/psf/requests/issues/4525
        # Requests doesnt yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
@@ -106,7 +94,11 @@ class perform_site_check(difference_detection_processor):
            # If the klass doesnt exist, just use a default
            klass = getattr(content_fetcher, "html_requests")

        proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
        if preferred_proxy:
            proxy_id = preferred_proxy
        else:
            proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)

        proxy_url = None
        if proxy_id:
            proxy_url = self.datastore.proxy_list.get(proxy_id).get('url')
@@ -132,7 +124,8 @@ class perform_site_check(difference_detection_processor):
        # requests for PDF's, images etc should be passwd the is_binary flag
        is_binary = watch.is_pdf

        fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'), is_binary=is_binary)
        fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'),
                    is_binary=is_binary)
        fetcher.quit()

        self.screenshot = fetcher.screenshot
@@ -148,7 +141,6 @@ class perform_site_check(difference_detection_processor):
            if update_obj['previous_md5_before_filters'] == watch.get('previous_md5_before_filters'):
                raise content_fetcher.checksumFromPreviousCheckWasTheSame()


        # Fetching complete, now filters
        # @todo move to class / maybe inside of fetcher abstract base?

@@ -191,21 +183,23 @@ class perform_site_check(difference_detection_processor):

            fetcher.content = fetcher.content.replace('</body>', metadata + '</body>')

        # Better would be if Watch.model could access the global data also
        # and then use getattr https://docs.python.org/3/reference/datamodel.html#object.__getitem__
        # https://realpython.com/inherit-python-dict/ instead of doing it procedurely
        include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='include_filters')
        include_filters_rule = [*watch.get('include_filters', []), *include_filters_from_tags]

        include_filters_rule = deepcopy(watch.get('include_filters', []))
        # include_filters_rule = watch['include_filters']
        subtractive_selectors = watch.get(
            "subtractive_selectors", []
        ) + self.datastore.data["settings"]["application"].get(
            "global_subtractive_selectors", []
        )
        subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='subtractive_selectors'),
                                 *watch.get("subtractive_selectors", []),
                                 *self.datastore.data["settings"]["application"].get("global_subtractive_selectors", [])
                                 ]

        # Inject a virtual LD+JSON price tracker rule
        if watch.get('track_ldjson_price_data', '') == PRICE_DATA_TRACK_ACCEPT:
            include_filters_rule.append(html_tools.LD_JSON_PRODUCT_OFFER_SELECTOR)
            include_filters_rule += html_tools.LD_JSON_PRODUCT_OFFER_SELECTORS

        has_filter_rule = include_filters_rule and len("".join(include_filters_rule).strip())
        has_subtractive_selectors = subtractive_selectors and len(subtractive_selectors[0].strip())
        has_filter_rule = len(include_filters_rule) and len(include_filters_rule[0].strip())
        has_subtractive_selectors = len(subtractive_selectors) and len(subtractive_selectors[0].strip())

        if is_json and not has_filter_rule:
            include_filters_rule.append("json:$")
@@ -220,14 +214,11 @@ class perform_site_check(difference_detection_processor):
                pass

        if has_filter_rule:
            json_filter_prefixes = ['json:', 'jq:']
            for filter in include_filters_rule:
                if any(prefix in filter for prefix in json_filter_prefixes):
                    stripped_text_from_html += html_tools.extract_json_as_string(content=fetcher.content, json_filter=filter)
                    is_html = False



        if is_html or is_source:

            # CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
@@ -278,7 +269,6 @@ class perform_site_check(difference_detection_processor):
        # Re #340 - return the content before the 'ignore text' was applied
        text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')


        # @todo whitespace coming from missing rtrim()?
        # stripped_text_from_html could be based on their preferences, replace the processed text with only that which they want to know about.
        # Rewrite's the processing text based on only what diff result they want to see
@@ -288,13 +278,13 @@ class perform_site_check(difference_detection_processor):
            # needs to not include (added) etc or it may get used twice
            # Replace the processed text with the preferred result
            rendered_diff = diff.render_diff(previous_version_file_contents=watch.get_last_fetched_before_filters(),
                                             newest_version_file_contents=stripped_text_from_html,
                                             include_equal=False,  # not the same lines
                                             include_added=watch.get('filter_text_added', True),
                                             include_removed=watch.get('filter_text_removed', True),
                                             include_replaced=watch.get('filter_text_replaced', True),
                                             line_feed_sep="\n",
                                             include_change_type_prefix=False)
                                             newest_version_file_contents=stripped_text_from_html,
                                             include_equal=False,  # not the same lines
                                             include_added=watch.get('filter_text_added', True),
                                             include_removed=watch.get('filter_text_removed', True),
                                             include_replaced=watch.get('filter_text_replaced', True),
                                             line_feed_sep="\n",
                                             include_change_type_prefix=False)

            watch.save_last_fetched_before_filters(text_content_before_ignored_filter)

@@ -309,7 +299,12 @@ class perform_site_check(difference_detection_processor):
        # Treat pages with no renderable text content as a change? No by default
        empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)
        if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0:
            raise content_fetcher.ReplyWithContentButNoText(url=url, status_code=fetcher.get_last_status_code(), screenshot=screenshot)
            raise content_fetcher.ReplyWithContentButNoText(url=url,
                                                            status_code=fetcher.get_last_status_code(),
                                                            screenshot=screenshot,
                                                            has_filters=has_filter_rule,
                                                            html_content=html_content
                                                            )

        # We rely on the actual text in the html output.. many sites have random script vars etc,
        # in the future we'll implement other mechanisms.
@@ -330,16 +325,25 @@ class perform_site_check(difference_detection_processor):
            regex_matched_output = []
            for s_re in extract_text:
                # incase they specified something in '/.../x'
                regex = self.forward_slash_enclosed_regex_to_options(s_re)
                result = re.findall(regex.encode('utf-8'), stripped_text_from_html)
                if re.search(PERL_STYLE_REGEX, s_re, re.IGNORECASE):
                    regex = html_tools.perl_style_slash_enclosed_regex_to_options(s_re)
                    result = re.findall(regex.encode('utf-8'), stripped_text_from_html)

                for l in result:
                    if type(l) is tuple:
                        # @todo - some formatter option default (between groups)
                        regex_matched_output += list(l) + [b'\n']
                    else:
                        # @todo - some formatter option default (between each ungrouped result)
                        regex_matched_output += [l] + [b'\n']
                    for l in result:
                        if type(l) is tuple:
                            # @todo - some formatter option default (between groups)
                            regex_matched_output += list(l) + [b'\n']
                        else:
                            # @todo - some formatter option default (between each ungrouped result)
                            regex_matched_output += [l] + [b'\n']
                else:
                    # Doesnt look like regex, just hunt for plaintext and return that which matches
                    # `stripped_text_from_html` will be bytes, so we must encode s_re also to bytes
                    r = re.compile(re.escape(s_re.encode('utf-8')), re.IGNORECASE)
                    res = r.findall(stripped_text_from_html)
                    if res:
                        for match in res:
                            regex_matched_output += [match] + [b'\n']

            # Now we will only show what the regex matched
            stripped_text_from_html = b''

@@ -18,12 +18,19 @@ module.exports = async ({page, context}) => {

    await page.setBypassCSP(true)
    await page.setExtraHTTPHeaders(req_headers);
    await page.setUserAgent(user_agent);

    if (user_agent) {
        await page.setUserAgent(user_agent);
    }
    // https://ourcodeworld.com/articles/read/1106/how-to-solve-puppeteer-timeouterror-navigation-timeout-of-30000-ms-exceeded

    await page.setDefaultNavigationTimeout(0);

    if (proxy_username) {
        // Setting Proxy-Authentication header is deprecated, and doing so can trigger header change errors from Puppeteer
        // https://github.com/puppeteer/puppeteer/issues/676 ?
        // https://help.brightdata.com/hc/en-us/articles/12632549957649-Proxy-Manager-How-to-Guides#h_01HAKWR4Q0AFS8RZTNYWRDFJC2
        // https://cri.dev/posts/2020-03-30-How-to-solve-Puppeteer-Chrome-Error-ERR_INVALID_ARGUMENT/
        await page.authenticate({
            username: proxy_username,
            password: proxy_password

@@ -5,14 +5,19 @@ function isItemInStock() {
        'agotado',
        'artikel zurzeit vergriffen',
        'as soon as stock is available',
        'ausverkauft', // sold out
        'available for back order',
        'back-order or out of stock',
        'backordered',
        'benachrichtigt mich', // notify me
        'brak na stanie',
        'brak w magazynie',
        'coming soon',
        'currently have any tickets for this',
        'currently unavailable',
        'dostępne wkrótce',
        'en rupture de stock',
        'ist derzeit nicht auf lager',
        'item is no longer available',
        'message if back in stock',
        'nachricht bei',
@@ -37,6 +42,7 @@ function isItemInStock() {
        'unavailable tickets',
        'we do not currently have an estimate of when this product will be back in stock.',
        'zur zeit nicht an lager',
        '已售完',
    ];

@@ -2,12 +2,48 @@

# exit when any command fails
set -e
# enable debug
set -x

# Test proxy list handling, starting two squids on different ports
# Each squid adds a different header to the response, which is the main thing we test for.
docker run --network changedet-network -d --name squid-one --hostname squid-one --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf ubuntu/squid:4.13-21.10_edge
docker run --network changedet-network -d --name squid-two --hostname squid-two --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf ubuntu/squid:4.13-21.10_edge

# SOCKS5 related - start simple Socks5 proxy server
# SOCKSTEST=xyz should show in the logs of this service to confirm it fetched
docker run --network changedet-network -d --hostname socks5proxy --name socks5proxy -p 1080:1080 -e PROXY_USER=proxy_user123 -e PROXY_PASSWORD=proxy_pass123 serjs/go-socks5-proxy
docker run --network changedet-network -d --hostname socks5proxy-noauth -p 1081:1080 --name socks5proxy-noauth serjs/go-socks5-proxy

echo "---------------------------------- SOCKS5 -------------------"
# SOCKS5 related - test from proxies.json
docker run --network changedet-network \
  -v `pwd`/tests/proxy_socks5/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json \
  --rm \
  -e "SOCKSTEST=proxiesjson" \
  test-changedetectionio \
  bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py'

# SOCKS5 related - by manually entering in UI
docker run --network changedet-network \
  --rm \
  -e "SOCKSTEST=manual" \
  test-changedetectionio \
  bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy.py'

# SOCKS5 related - test from proxies.json via playwright - NOTE- PLAYWRIGHT DOESNT SUPPORT AUTHENTICATING PROXY
docker run --network changedet-network \
  -e "SOCKSTEST=manual-playwright" \
  -v `pwd`/tests/proxy_socks5/proxies.json-example-noauth:/app/changedetectionio/test-datastore/proxies.json \
  -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" \
  --rm \
  test-changedetectionio \
  bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py'

echo "socks5 server logs"
docker logs socks5proxy
echo "----------------------------------"

# Used for configuring a custom proxy URL via the UI
docker run --network changedet-network -d \
  --name squid-custom \
@@ -19,7 +55,6 @@ docker run --network changedet-network -d \


## 2nd test actually choose the preferred proxy from proxies.json

docker run --network changedet-network \
  -v `pwd`/tests/proxy_list/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json \
  test-changedetectionio \
@@ -44,7 +79,6 @@ fi


# Test the UI configurable proxies

docker run --network changedet-network \
  test-changedetectionio \
  bash -c 'cd changedetectionio && pytest tests/proxy_list/test_select_custom_proxy.py'
@@ -58,4 +92,25 @@ then
  exit 1
fi

# Test "no-proxy" option
docker run --network changedet-network \
  test-changedetectionio \
  bash -c 'cd changedetectionio && pytest tests/proxy_list/test_noproxy.py'

# We need to handle grep returning 1
set +e
# Check request was never seen in any container
for c in $(echo "squid-one squid-two squid-custom"); do
  echo Checking $c
  docker logs $c &> $c.txt
  grep noproxy $c.txt
  if [ $? -ne 1 ]
  then
    echo "Saw request for noproxy in $c container"
    cat $c.txt
    exit 1
  fi
done


docker kill squid-one squid-two squid-custom
58
changedetectionio/static/images/brightdata.svg
Normal file
58
changedetectionio/static/images/brightdata.svg
Normal file
@@ -0,0 +1,58 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
height="59.553207"
|
||||
viewBox="-0.36 95.21 25.082135 59.553208"
|
||||
width="249.99138"
|
||||
version="1.1"
|
||||
id="svg12"
|
||||
sodipodi:docname="brightdata.svg"
|
||||
inkscape:version="1.1.2 (0a00cf5339, 2022-02-04)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<defs
|
||||
id="defs16" />
|
||||
<sodipodi:namedview
|
||||
id="namedview14"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
showgrid="false"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0"
|
||||
inkscape:zoom="0.9464"
|
||||
inkscape:cx="22.189349"
|
||||
inkscape:cy="-90.870668"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1051"
|
||||
inkscape:window-x="1920"
|
||||
inkscape:window-y="0"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg12" />
|
||||
<path
|
||||
d="m -34.416031,129.28 c -3.97,-2.43 -5.1,-6.09 -4.32,-10.35 0.81,-4.4 3.95,-6.75 8.04,-7.75 4.23,-1.04 8.44,-0.86 12.3,1.5 0.63,0.39 0.93,0.03 1.31,-0.29 1.5,-1.26 3.27,-1.72 5.189999,-1.83 0.79,-0.05 1.04,0.24 1.01,1.01 -0.05,1.31 -0.04,2.63 0,3.95 0.02,0.65 -0.19,0.93 -0.87,0.89 -0.889999,-0.04 -1.789999,0.03 -2.669999,-0.02 -0.82,-0.04 -1.08,0.1 -0.88,1.04 0.83,3.9 -0.06,7.37 -3.1,10.06 -2.76,2.44 -6.13,3.15 -9.72,3.04 -0.51,-0.02 -1.03,-0.02 -1.52,-0.13 -1.22,-0.25 -1.96,0.14 -2.19,1.41 -0.28,1.54 0.16,2.62 1.37,3.07 0.84,0.31 1.74,0.35 2.63,0.39 2.97,0.13 5.95,-0.18 8.91,0.21 2.93,0.39 5.69,1.16 6.85,4.25 1.269999,3.38 0.809999,6.62 -1.48,9.47 -2.73,3.39 -6.52,4.78 -10.66,5.33 -3.53,0.48 -7.04,0.27 -10.39,-1.11 -3.89,-1.6 -5.75,-4.95 -4.84,-8.72 0.51,-2.11 1.85,-3.58 3.69,-4.65 0.38,-0.22 0.93,-0.32 0.28,-0.96 -2.91,-2.83 -2.85,-6.16 0.1,-8.95 0.28,-0.26 0.6,-0.53 0.96,-0.86 z m 8.07,21.5 c 0.95,0.04 1.87,-0.13 2.78,-0.33 1.89,-0.42 3.51,-1.3 4.49,-3.06 1.82,-3.25 0.24,-6.2 -3.37,-6.58 -2.88,-0.3 -5.76,0.24 -8.63,-0.13 -0.53,-0.07 -0.75,0.34 -0.95,0.71 -1.16,2.24 -1.08,4.53 0,6.73 1.15,2.34 3.46,2.48 5.68,2.66 z m -5,-30.61 c -0.03,1.67 0.08,3.19 0.74,4.61 0.76,1.62 2.17,2.42 4.03,2.31 1.62,-0.1 2.9,-1.12 3.36,-2.84 0.66,-2.46 0.69,-4.95 0.01,-7.42 -0.49,-1.76 -1.7,-2.64 -3.56,-2.7 -2.08,-0.07 -3.37,0.7 -4.04,2.42 -0.47,1.21 -0.6,2.47 -0.54,3.62 z m 32.9399993,6.56 c 0,2.59 0.05,5.18 -0.02,7.77 -0.03,1.03 0.31,1.46 1.32,1.52 0.65,0.04 1.61,-0.09 1.82,0.57 0.26,0.81 0.11,1.76 0.06,2.65 -0.03,0.48 -0.81,0.39 -0.81,0.39 l -11.47,0.01 c 0,0 -0.95,-0.21 -0.88,-0.88 0.03,-0.29 0.04,-0.6 0,-0.89 -0.19,-1.24 0.21,-1.92 1.58,-1.9 0.99,0.01 1.28,-0.52 1.28,-1.53 -0.05,-8.75 -0.05,-17.49 0,-26.24 0.01,-1.15 -0.36,-1.62 -1.44,-1.67 -0.17,-0.01 -0.34,-0.04 -0.5,-0.07 -1.43,-0.22 -2.12,-1.57 -1.53,-2.91 0.15,-0.35 0.43,-0.36 0.72,-0.4 2.94,-0.41 5.88,-0.81 8.82000002,-1.23 0.81999998,-0.12 0.99999998,0.27 0.98999998,1.01 -0.02,3.35 0,6.71 0.02,10.06 0,0.35 -0.23,0.84 0.18,1.03 0.38,0.17 0.69,-0.25 0.99,-0.45 2.56,-1.74 5.33,-2.73 8.4900007,-2.56 3.51005,0.19 5.65005,1.95 6.35005,5.46 0.42,2.09 0.52,4.21 0.51,6.33 -0.02,3.86 0.05,7.73 -0.04,11.59 -0.02,1.12 0.37,1.5 1.39,1.6 0.61,0.05 1.55,-0.13 1.74,0.47 0.26,0.85 0.12,1.84 0.1,2.77 -0.01,0.41 -0.69,0.37 -0.69,0.37 l -11.4700504,0.01 c 0,0 -0.81,-0.29 -0.8,-0.85 0.01,-0.38 0.04,-0.77 -0.01,-1.15 -0.13,-1.01 0.32,-1.52 1.31,-1.56 1.0600004,-0.05 1.3800004,-0.55 1.3500004,-1.63 -0.14,-4.84 0.16,-9.68 -0.18,-14.51 -0.26,-3.66 -2.1100004,-4.95 -5.6700007,-3.99 -0.25,0.07 -0.49,0.15 -0.73,0.22 -2.57,0.8 -2.79,1.09 -2.79,3.71 0.01,2.3 0.01,4.59 0.01,6.88 z M -109.26603,122.56 c 0,-4.75 -0.02,-9.51 0.02,-14.26 0.01,-0.92 -0.17,-1.47 -1.19,-1.45 -0.16,0 -0.33,-0.07 -0.5,-0.1 -1.56,-0.27 -2.24,-1.47 -1.69,-2.92 0.14,-0.37 0.41,-0.38 0.7,-0.42 2.98,-0.41 5.97,-0.81 8.94,-1.24 0.85,-0.12 0.88,0.33 0.88,0.96 -0.01,3.01 -0.01,6.03 0,9.04 0,0.4 -0.18,0.96 0.27,1.16 0.36,0.16 0.66,-0.3 0.96,-0.52 4.729999,-3.51 12.459999,-2.61 14.889999,4.48 1.89,5.51 1.91,11.06 -0.96,16.28 -2.37,4.31 -6.19,6.49 -11.15,6.59 -3.379999,0.07 -6.679999,-0.3 -9.909999,-1.37 -0.93,-0.31 -1.3,-0.78 -1.28,-1.83 0.05,-4.81 0.02,-9.6 0.02,-14.4 z m 7.15,3.89 c 0,2.76 0.02,5.52 -0.01,8.28 -0.01,0.76 0.18,1.29 0.91,1.64 1.899999,0.9 4.299999,0.5 5.759999,-1.01 0.97,-1 1.56,-2.21 1.96,-3.52 1.03,-3.36 0.97,-6.78 0.61,-10.22 a 9.991,9.991 0 0 0 -0.93,-3.29 c -1.47,-3.06 -4.67,-3.85 -7.439999,-1.86 -0.6,0.43 -0.88,0.93 -0.87,1.7 0.04,2.76 0.01,5.52 0.01,8.28 z"
|
||||
fill="#4280f6"
|
||||
id="path2" />
|
||||
<path
|
||||
d="m 68.644019,137.2 c -1.62,1.46 -3.41,2.56 -5.62,2.96 -4.4,0.8 -8.7,-1.39 -10.49,-5.49 -2.31,-5.31 -2.3,-10.67 -0.1,-15.98 2.31,-5.58 8.29,-8.65 14.24,-7.46 1.71,0.34 1.9,0.18 1.9,-1.55 0,-0.68 -0.05,-1.36 0.01,-2.04 0.09,-1.02 -0.25,-1.54 -1.34,-1.43 -0.64,0.06 -1.26,-0.1 -1.88,-0.21 -1.32,-0.24 -1.6,-0.62 -1.37,-1.97 0.07,-0.41 0.25,-0.57 0.65,-0.62 2.63,-0.33 5.27,-0.66 7.9,-1.02 1.04,-0.14 1.17,0.37 1.17,1.25 -0.02,10.23 -0.02,20.45 -0.01,30.68 v 1.02 c 0.02,0.99 0.35,1.6 1.52,1.47 0.52,-0.06 1.35,-0.27 1.25,0.73 -0.08,0.8 0.58,1.93 -0.94,2.18 -1.29,0.22 -2.51,0.69 -3.86,0.65 -2.04,-0.06 -2.3,-0.23 -2.76,-2.19 -0.09,-0.3 0.06,-0.67 -0.27,-0.98 z m -0.07,-12.46 c 0,-2.8 -0.04,-5.6 0.02,-8.39 0.02,-0.9 -0.28,-1.47 -1.05,-1.81 -3.18,-1.4 -7.54,-0.8 -9.3,2.87 -0.83,1.74 -1.31,3.54 -1.49,5.46 -0.28,2.93 -0.38,5.83 0.61,8.65 0.73,2.09 1.81,3.9 4.11,4.67 2.49,0.83 4.55,-0.04 6.5,-1.48 0.54,-0.4 0.62,-0.95 0.61,-1.57 -0.02,-2.8 -0.01,-5.6 -0.01,-8.4 z m 28.79,2.53 c 0,3.24 0.04,5.83 -0.02,8.41 -0.02,1 0.19,1.49 1.309998,1.41 0.55,-0.04 1.460003,-0.46 1.520003,0.73 0.05,1.02 0.1,1.89 -1.330003,2.08 -1.289998,0.17 -2.559998,0.51 -3.889998,0.48 -1.88,-0.05 -2.15,-0.26 -2.42,-2.15 -0.04,-0.27 0.14,-0.65 -0.22,-0.79 -0.34,-0.13 -0.5,0.24 -0.72,0.42 -3.61,3 -8.15,3.4 -11.64,1.08 -1.61,-1.07 -2.49,-2.63 -2.67,-4.43 -0.51,-5.13 0.77,-7.91 6.3,-10.22 2.44,-1.02 5.07,-1.27 7.68,-1.49 0.77,-0.07 1.03,-0.28 1.02,-1.05 -0.03,-1.48 -0.05,-2.94 -0.64,-4.36 -0.59,-1.42 -1.67,-1.92 -3.08,-2.03 -3.04,-0.24 -5.88,0.5 -8.63,1.71 -0.51,0.23 -1.19,0.75 -1.48,-0.13 -0.26,-0.77 -1.35,-1.61 0.05,-2.47 3.27,-2 6.7,-3.44 10.61,-3.42 1.44,0.01 2.88,0.27 4.21,0.81 2.67,1.08 3.44,3.4 3.8,5.99 0.46,3.37 0.1,6.73 0.24,9.42 z m -5.09,2.9 c 0,-1.23 -0.01,-2.46 0,-3.69 0,-0.52 -0.06,-0.98 -0.75,-0.84 -1.45,0.3 -2.93,0.28 -4.37,0.69 -3.71,1.04 -5.46,4.48 -3.97,8.03 0.51,1.22 1.48,1.98 2.79,2.16 2.01,0.28 3.86,-0.29 5.6,-1.28 0.54,-0.31 0.73,-0.76 0.72,-1.37 -0.05,-1.23 -0.02,-2.47 -0.02,-3.7 z m 43.060001,-2.89 c 0,2.72 0.01,5.43 -0.01,8.15 0,0.66 0.02,1.21 0.91,1.12 0.54,-0.06 0.99,0.12 0.86,0.75 -0.15,0.71 0.56,1.7 -0.58,2.09 -1.55,0.52 -3.16,0.59 -4.77,0.4 -0.99,-0.12 -1.12,-1.01 -1.18,-1.73 -0.08,-1.15 -0.16,-1.45 -1.24,-0.54 -3.41,2.87 -8.05,3.17 -11.43,0.88 -1.75,-1.18 -2.49,-2.91 -2.7,-4.94 -0.64,-6.24 3.16,-8.74 7.83,-10.17 2.04,-0.62 4.14,-0.8 6.24,-0.99 0.81,-0.07 1,-0.36 0.98,-1.09 -0.04,-1.31 0.04,-2.62 -0.42,-3.89 -0.57,-1.57 -1.53,-2.34 -3.18,-2.45 -3.03,-0.21 -5.88,0.46 -8.64,1.66 -0.6,0.26 -1.25,0.81 -1.68,-0.2 -0.34,-0.8 -1.08,-1.61 0.16,-2.36 4.12,-2.5 8.44,-4.16 13.36,-3.07 3.21,0.71 4.89,2.91 5.26,6.34 0.18,1.69 0.22,3.37 0.22,5.07 0.01,1.66 0.01,3.32 0.01,4.97 z m -5.09,2.54 c 0,-1.27 -0.03,-2.54 0.01,-3.81 0.02,-0.74 -0.27,-1.02 -0.98,-0.92 -1.21,0.17 -2.43,0.28 -3.62,0.55 -3.72,0.83 -5.47,3.48 -4.82,7.21 0.29,1.66 1.57,2.94 3.21,3.16 2.02,0.27 3.85,-0.34 5.57,-1.34 0.49,-0.29 0.64,-0.73 0.63,-1.29 -0.02,-1.18 0,-2.37 0,-3.56 z"
|
||||
fill="#c8dbfb"
|
||||
id="path4" />
|
||||
<path
|
||||
d="m 26.314019,125.77 c 0,-2.89 -0.05,-5.77 0.02,-8.66 0.03,-1.04 -0.33,-1.39 -1.31,-1.24 a 0.7,0.7 0 0 1 -0.25,0 c -0.57,-0.18 -1.44,0.48 -1.68,-0.58 -0.35,-1.48 -0.02,-2.3 1.21,-2.7 1.3,-0.43 2.16,-1.26 2.76,-2.46 0.78,-1.56 1.44,-3.17 1.91,-4.84 0.18,-0.63 0.47,-0.86 1.15,-0.88 3.28,-0.09 3.27,-0.11 3.32,3.17 0.01,1.06 0.09,2.12 0.09,3.18 -0.01,0.67 0.27,0.89 0.91,0.88 1.61,-0.02 3.23,0.03 4.84,-0.02 0.77,-0.02 1.01,0.23 1.03,1.01 0.08,3.27 0.1,3.27 -3.09,3.27 -0.93,0 -1.87,0.03 -2.8,-0.01 -0.67,-0.02 -0.89,0.26 -0.88,0.91 0.04,5.43 0.04,10.86 0.12,16.29 0.02,1.7 0.75,2.26 2.46,2.1 1.1,-0.1 2.19,-0.26 3.23,-0.65 0.59,-0.22 0.89,-0.09 1.14,0.53 0.93,2.29 0.92,2.37 -1.32,3.52 -2.54,1.3 -5.22,1.99 -8.1,1.79 -2.27,-0.16 -3.68,-1.27 -4.35,-3.45 -0.3,-0.98 -0.41,-1.99 -0.41,-3.01 z m -97.67005,-8.99 c 0.57,-0.84 1.11,-1.74 1.76,-2.55 1.68,-2.09 3.68,-3.62 6.54,-3.66 1.08,-0.01 1.63,0.28 1.57,1.52 -0.1,2.08 -0.05,4.16 -0.02,6.24 0.01,0.74 -0.17,0.96 -0.96,0.76 -2.36,-0.59 -4.71,-0.42 -7.03,0.28 -0.8,0.24 -1.16,0.62 -1.15,1.52 0.05,4.5 0.04,9 0,13.5 -0.01,0.89 0.29,1.16 1.15,1.2 1.23,0.06 2.44,0.32 3.67,0.39 0.75,0.05 0.91,0.38 0.89,1.04 -0.06,2.86 0.29,2.28 -2.25,2.3 -4.2,0.04 -8.41,-0.02 -12.61,0.03 -0.91,0.01 -1.39,-0.18 -1.22,-1.18 0.02,-0.12 0,-0.25 0,-0.38 0.02,-2.1 -0.24,-1.88 1.77,-2.04 1.33,-0.11 1.6,-0.67 1.58,-1.9 -0.07,-5.35 -0.04,-10.7 -0.02,-16.05 0,-0.78 -0.17,-1.2 -1,-1.46 -2.21,-0.68 -2.7,-1.69 -2.22,-3.99 0.11,-0.52 0.45,-0.56 0.82,-0.62 2.22,-0.34 4.44,-0.7 6.67,-0.99 0.99,-0.13 1.82,0.7 1.84,1.76 0.03,1.4 0.03,2.8 0.04,4.2 -0.01,0.02 0.06,0.04 0.18,0.08 z m 25.24,6.59 c 0,3.69 0.04,7.38 -0.03,11.07 -0.02,1.04 0.31,1.48 1.32,1.49 0.29,0 0.59,0.12 0.88,0.13 0.93,0.01 1.18,0.47 1.16,1.37 -0.05,2.19 0,2.19 -2.24,2.19 -3.48,0 -6.96,-0.04 -10.44,0.03 -1.09,0.02 -1.47,-0.33 -1.3,-1.36 0.02,-0.12 0.02,-0.26 0,-0.38 -0.28,-1.39 0.39,-1.96 1.7,-1.9 1.36,0.06 1.76,-0.51 1.74,-1.88 -0.09,-5.17 -0.08,-10.35 0,-15.53 0.02,-1.22 -0.32,-1.87 -1.52,-2.17 -0.57,-0.14 -1.47,-0.11 -1.57,-0.85 -0.15,-1.04 -0.05,-2.11 0.01,-3.17 0.02,-0.34 0.44,-0.35 0.73,-0.39 2.81,-0.39 5.63,-0.77 8.44,-1.18 0.92,-0.14 1.15,0.2 1.14,1.09 -0.04,3.8 -0.02,7.62 -0.02,11.44 z"
|
||||
fill="#4280f6"
|
||||
id="path6" />
|
||||
<path
|
||||
d="m 101.44402,125.64 c 0,-3.18 -0.03,-6.37 0.02,-9.55 0.02,-0.94 -0.26,-1.36 -1.22,-1.22 -0.21,0.03 -0.430003,0.04 -0.630003,0 -0.51,-0.12 -1.35,0.39 -1.44,-0.55 -0.08,-0.85 -0.429998,-1.87 0.93,-2.24 2.080003,-0.57 2.720003,-2.39 3.350003,-4.17 0.31,-0.88 0.62,-1.76 0.87,-2.66 0.18,-0.64 0.52,-0.85 1.19,-0.84 2.46,0.05 2,-0.15 2.04,2.04 0.02,1.1 0.08,2.21 -0.02,3.31 -0.11,1.16 0.46,1.52 1.46,1.53 1.78,0.01 3.57,0.04 5.35,-0.01 0.82,-0.02 1.12,0.23 1.11,1.08 -0.05,2.86 0.19,2.49 -2.42,2.51 -1.53,0.01 -3.06,0.02 -4.59,-0.01 -0.65,-0.01 -0.9,0.22 -0.9,0.89 0.02,5.52 0,11.04 0.03,16.56 0,0.67 0.14,1.34 0.25,2.01 0.17,1.04 1.17,1.62 2.59,1.42 1.29,-0.19 2.57,-0.49 3.86,-0.69 0.43,-0.07 1.05,-0.47 1.19,0.4 0.12,0.75 1.05,1.61 -0.09,2.24 -2.09,1.16 -4.28,2.07 -6.71,2.16 -1.05,0.04 -2.13,0.2 -3.16,-0.14 -1.92,-0.65 -3.03,-2.28 -3.05,-4.51 -0.02,-3.19 -0.01,-6.37 -0.01,-9.56 z"
|
||||
fill="#c8dbfb"
|
||||
id="path8" />
|
||||
<path
|
||||
d="m -50.816031,95.21 c 0.19,2.160002 1.85,3.240002 2.82,4.740002 0.25,0.379998 0.48,0.109998 0.67,-0.16 0.21,-0.31 0.6,-1.21 1.15,-1.28 -0.35,1.38 -0.04,3.149998 0.16,4.449998 0.49,3.05 -1.22,5.64 -4.07,6.18 -3.38,0.65 -6.22,-2.21 -5.6,-5.62 0.23,-1.24 1.37,-2.5 0.77,-3.699998 -0.85,-1.7 0.54,-0.52 0.79,-0.22 1.04,1.199998 1.21,0.09 1.45,-0.55 0.24,-0.63 0.31,-1.31 0.47,-1.97 0.19,-0.770002 0.55,-1.400002 1.39,-1.870002 z"
|
||||
fill="#4280f6"
|
||||
id="path10" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 11 KiB |
57
changedetectionio/static/images/oxylabs.svg
Normal file
57
changedetectionio/static/images/oxylabs.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 9.7 KiB |
@@ -208,7 +208,7 @@ $(document).ready(function () {
|
||||
console.log(x);
|
||||
if (x && first_available.length) {
|
||||
// @todo will it let you click shit that has a layer ontop? probably not.
|
||||
if (x['tagtype'] === 'text' || x['tagtype'] === 'email' || x['tagName'] === 'textarea' || x['tagtype'] === 'password' || x['tagtype'] === 'search') {
|
||||
if (x['tagtype'] === 'text' || x['tagtype'] === 'number' || x['tagtype'] === 'email' || x['tagName'] === 'textarea' || x['tagtype'] === 'password' || x['tagtype'] === 'search') {
|
||||
$('select', first_available).val('Enter text in field').change();
|
||||
$('input[type=text]', first_available).first().val(x['xpath']);
|
||||
$('input[placeholder="Value"]', first_available).addClass('ok').click().focus();
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
$(document).ready(function () {
|
||||
var csrftoken = $('input[name=csrf_token]').val();
|
||||
$.ajaxSetup({
|
||||
beforeSend: function (xhr, settings) {
|
||||
if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
|
||||
xhr.setRequestHeader("X-CSRFToken", csrftoken)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Load it when the #screenshot tab is in use, so we dont give a slow experience when waiting for the text diff to load
|
||||
window.addEventListener('hashchange', function (e) {
|
||||
toggle(location.hash);
|
||||
@@ -15,11 +24,71 @@ $(document).ready(function () {
|
||||
$("#settings").hide();
|
||||
} else if (hash_name === '#extract') {
|
||||
$("#settings").hide();
|
||||
}
|
||||
|
||||
|
||||
else {
|
||||
} else {
|
||||
$("#settings").show();
|
||||
}
|
||||
}
|
||||
|
||||
const article = $('.highlightable-filter')[0];
|
||||
|
||||
// We could also add the 'touchend' event for touch devices, but since
|
||||
// most iOS/Android browsers already show a dialog when you select
|
||||
// text (often with a Share option) we'll skip that
|
||||
article.addEventListener('mouseup', dragTextHandler, false);
|
||||
article.addEventListener('mousedown', clean, false);
|
||||
|
||||
function clean(event) {
|
||||
$("#highlightSnippet").remove();
|
||||
}
|
||||
|
||||
|
||||
function dragTextHandler(event) {
|
||||
console.log('mouseupped');
|
||||
|
||||
// Check if any text was selected
|
||||
if (window.getSelection().toString().length > 0) {
|
||||
|
||||
// Find out how much (if any) user has scrolled
|
||||
var scrollTop = (window.pageYOffset !== undefined) ? window.pageYOffset : (document.documentElement || document.body.parentNode || document.body).scrollTop;
|
||||
|
||||
// Get cursor position
|
||||
const posX = event.clientX;
|
||||
const posY = event.clientY + 20 + scrollTop;
|
||||
|
||||
// Append HTML to the body, create the "Tweet Selection" dialog
|
||||
document.body.insertAdjacentHTML('beforeend', '<div id="highlightSnippet" style="position: absolute; top: ' + posY + 'px; left: ' + posX + 'px;"><div class="pure-form-message-inline" style="font-size: 70%">Ignore any change on any line which contains the selected text.</div><br><a data-mode="exact" href="javascript:void(0);" class="pure-button button-secondary button-xsmall">Ignore exact text</a> </div>');
|
||||
|
||||
if (/\d/.test(window.getSelection().toString())) {
|
||||
// Offer regex replacement
|
||||
document.getElementById("highlightSnippet").insertAdjacentHTML('beforeend', '<a data-mode="digit-regex" href="javascript:void(0);" class="pure-button button-secondary button-xsmall">Ignore text including number changes</a>');
|
||||
}
|
||||
|
||||
$('#highlightSnippet a').bind('click', function (e) {
|
||||
if(!window.getSelection().toString().trim().length) {
|
||||
alert('Oops no text selected!');
|
||||
return;
|
||||
}
|
||||
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: highlight_submit_ignore_url,
|
||||
data: {'mode': $(this).data('mode'), 'selection': window.getSelection().toString()},
|
||||
statusCode: {
|
||||
400: function () {
|
||||
// More than likely the CSRF token was lost when the server restarted
|
||||
alert("There was a problem processing the request, please reload the page.");
|
||||
}
|
||||
}
|
||||
}).done(function (data) {
|
||||
$("#highlightSnippet").html(data)
|
||||
}).fail(function (data) {
|
||||
console.log(data);
|
||||
alert('There was an error communicating with the server.');
|
||||
});
|
||||
});
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
});
|
||||
|
||||
@@ -32,5 +32,10 @@ $(document).ready(function () {
|
||||
window.getSelection().removeAllRanges();
|
||||
|
||||
});
|
||||
|
||||
$("#notification-token-toggle").click(function (e) {
|
||||
e.preventDefault();
|
||||
$('#notification-tokens-info').toggle();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
87
changedetectionio/static/js/recheck-proxy.js
Normal file
87
changedetectionio/static/js/recheck-proxy.js
Normal file
@@ -0,0 +1,87 @@
|
||||
$(function () {
|
||||
/* add container before each proxy location to show status */
|
||||
|
||||
var option_li = $('.fetch-backend-proxy li').filter(function() {
|
||||
return $("input",this)[0].value.length >0;
|
||||
});
|
||||
|
||||
//var option_li = $('.fetch-backend-proxy li');
|
||||
var isActive = false;
|
||||
$(option_li).prepend('<div class="proxy-status"></div>');
|
||||
$(option_li).append('<div class="proxy-timing"></div><div class="proxy-check-details"></div>');
|
||||
|
||||
function set_proxy_check_status(proxy_key, state) {
|
||||
// select input by value name
|
||||
const proxy_li = $('input[value="' + proxy_key + '" ]').parent();
|
||||
if (state['status'] === 'RUNNING') {
|
||||
$('.proxy-status', proxy_li).html('<span class="spinner"></span>');
|
||||
}
|
||||
if (state['status'] === 'OK') {
|
||||
$('.proxy-status', proxy_li).html('<span style="color: green; font-weight: bold" >OK</span>');
|
||||
$('.proxy-check-details', proxy_li).html(state['text']);
|
||||
}
|
||||
if (state['status'] === 'ERROR' || state['status'] === 'ERROR OTHER') {
|
||||
$('.proxy-status', proxy_li).html('<span style="color: red; font-weight: bold" >X</span>');
|
||||
$('.proxy-check-details', proxy_li).html(state['text']);
|
||||
}
|
||||
$('.proxy-timing', proxy_li).html(state['time']);
|
||||
}
|
||||
|
||||
|
||||
function pollServer() {
|
||||
if (isActive) {
|
||||
window.setTimeout(function () {
|
||||
$.ajax({
|
||||
url: proxy_recheck_status_url,
|
||||
success: function (data) {
|
||||
var all_done = true;
|
||||
$.each(data, function (proxy_key, state) {
|
||||
set_proxy_check_status(proxy_key, state);
|
||||
if (state['status'] === 'RUNNING') {
|
||||
all_done = false;
|
||||
}
|
||||
});
|
||||
|
||||
if (all_done) {
|
||||
console.log("Shutting down poller, all done.")
|
||||
isActive = false;
|
||||
} else {
|
||||
pollServer();
|
||||
}
|
||||
},
|
||||
error: function () {
|
||||
//ERROR HANDLING
|
||||
pollServer();
|
||||
}
|
||||
});
|
||||
}, 2000);
|
||||
}
|
||||
}
|
||||
|
||||
$('#check-all-proxies').click(function (e) {
|
||||
e.preventDefault()
|
||||
$('body').addClass('proxy-check-active');
|
||||
$('.proxy-check-details').html('');
|
||||
$('.proxy-status').html('<span class="spinner"></span>').fadeIn();
|
||||
$('.proxy-timing').html('');
|
||||
|
||||
// Request start, needs CSRF?
|
||||
$.ajax({
|
||||
type: "GET",
|
||||
url: recheck_proxy_start_url,
|
||||
}).done(function (data) {
|
||||
$.each(data, function (proxy_key, state) {
|
||||
set_proxy_check_status(proxy_key, state['status'])
|
||||
});
|
||||
isActive = true;
|
||||
pollServer();
|
||||
|
||||
}).fail(function (data) {
|
||||
console.log(data);
|
||||
alert('There was an error communicating with the server.');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
@@ -9,13 +9,7 @@ $(document).ready(function () {
|
||||
const htmlElement = document.getElementsByTagName("html");
|
||||
const isDarkMode = htmlElement[0].dataset.darkmode === "true";
|
||||
htmlElement[0].dataset.darkmode = !isDarkMode;
|
||||
if (isDarkMode) {
|
||||
button.classList.remove("dark");
|
||||
setCookieValue(false);
|
||||
} else {
|
||||
button.classList.add("dark");
|
||||
setCookieValue(true);
|
||||
}
|
||||
setCookieValue(!isDarkMode);
|
||||
};
|
||||
|
||||
const setCookieValue = (value) => {
|
||||
|
||||
@@ -1,31 +1,45 @@
|
||||
$(function () {
|
||||
// Remove unviewed status when normally clicked
|
||||
$('.diff-link').click(function () {
|
||||
$(this).closest('.unviewed').removeClass('unviewed');
|
||||
});
|
||||
// Remove unviewed status when normally clicked
|
||||
$('.diff-link').click(function () {
|
||||
$(this).closest('.unviewed').removeClass('unviewed');
|
||||
});
|
||||
|
||||
$("#checkbox-assign-tag").click(function (e) {
|
||||
$('#op_extradata').val(prompt("Enter a tag name"));
|
||||
});
|
||||
|
||||
$('.with-share-link > *').click(function () {
|
||||
$("#copied-clipboard").remove();
|
||||
$('.with-share-link > *').click(function () {
|
||||
$("#copied-clipboard").remove();
|
||||
|
||||
var range = document.createRange();
|
||||
var n=$("#share-link")[0];
|
||||
range.selectNode(n);
|
||||
window.getSelection().removeAllRanges();
|
||||
window.getSelection().addRange(range);
|
||||
document.execCommand("copy");
|
||||
window.getSelection().removeAllRanges();
|
||||
var range = document.createRange();
|
||||
var n = $("#share-link")[0];
|
||||
range.selectNode(n);
|
||||
window.getSelection().removeAllRanges();
|
||||
window.getSelection().addRange(range);
|
||||
document.execCommand("copy");
|
||||
window.getSelection().removeAllRanges();
|
||||
|
||||
$('.with-share-link').append('<span style="font-size: 80%; color: #fff;" id="copied-clipboard">Copied to clipboard</span>');
|
||||
$("#copied-clipboard").fadeOut(2500, function() {
|
||||
$(this).remove();
|
||||
});
|
||||
});
|
||||
$('.with-share-link').append('<span style="font-size: 80%; color: #fff;" id="copied-clipboard">Copied to clipboard</span>');
|
||||
$("#copied-clipboard").fadeOut(2500, function () {
|
||||
$(this).remove();
|
||||
});
|
||||
});
|
||||
|
||||
$(".watch-table tr").click(function (event) {
|
||||
var tagName = event.target.tagName.toLowerCase();
|
||||
if (tagName === 'tr' || tagName === 'td') {
|
||||
var x = $('input[type=checkbox]', this);
|
||||
if (x) {
|
||||
$(x).click();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// checkboxes - check all
|
||||
$("#check-all").click(function (e) {
|
||||
$('input[type=checkbox]').not(this).prop('checked', this.checked);
|
||||
});
|
||||
|
||||
// checkboxes - show/hide buttons
|
||||
$("input[type=checkbox]").click(function (e) {
|
||||
if ($('input[type=checkbox]:checked').length) {
|
||||
|
||||
@@ -42,4 +42,8 @@ $(document).ready(function () {
|
||||
$('#notification_urls').val('');
|
||||
e.preventDefault();
|
||||
});
|
||||
$("#notification-token-toggle").click(function (e) {
|
||||
e.preventDefault();
|
||||
$('#notification-tokens-info').toggle();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -218,3 +218,10 @@ td#diff-col div {
|
||||
text-align: center; }
|
||||
.tab-pane-inner#screenshot img {
|
||||
max-width: 99%; }
|
||||
|
||||
#highlightSnippet {
|
||||
background: var(--color-background);
|
||||
padding: 1em;
|
||||
border-radius: 5px;
|
||||
background: var(--color-background);
|
||||
box-shadow: 1px 1px 4px var(--color-shadow-jump); }
|
||||
|
||||
@@ -119,3 +119,11 @@ td#diff-col div {
|
||||
max-width: 99%;
|
||||
}
|
||||
}
|
||||
|
||||
#highlightSnippet {
|
||||
background: var(--color-background);
|
||||
padding: 1em;
|
||||
border-radius: 5px;
|
||||
background: var(--color-background);
|
||||
box-shadow: 1px 1px 4px var(--color-shadow-jump);
|
||||
}
|
||||
|
||||
@@ -44,7 +44,7 @@
|
||||
#browser-steps .flex-wrapper {
|
||||
display: flex;
|
||||
flex-flow: row;
|
||||
height: 600px; /*@todo make this dynamic */
|
||||
height: 70vh;
|
||||
}
|
||||
|
||||
/* this is duplicate :( */
|
||||
|
||||
25
changedetectionio/static/styles/scss/parts/_darkmode.scss
Normal file
25
changedetectionio/static/styles/scss/parts/_darkmode.scss
Normal file
@@ -0,0 +1,25 @@
|
||||
|
||||
#toggle-light-mode {
|
||||
width: 3rem;
|
||||
/* default */
|
||||
.icon-dark {
|
||||
display: none;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
html[data-darkmode="true"] {
|
||||
#toggle-light-mode {
|
||||
.icon-light {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.icon-dark {
|
||||
display: block;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ ul#requests-extra_proxies {
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/* each proxy entry is a `table` */
|
||||
table {
|
||||
tr {
|
||||
@@ -15,3 +16,47 @@ ul#requests-extra_proxies {
|
||||
}
|
||||
}
|
||||
|
||||
#request {
|
||||
/* Auto proxy scan/checker */
|
||||
label[for=proxy] {
|
||||
display: inline-block;
|
||||
}
|
||||
}
|
||||
|
||||
body.proxy-check-active {
|
||||
#request {
|
||||
.proxy-status {
|
||||
width: 2em;
|
||||
}
|
||||
|
||||
.proxy-check-details {
|
||||
font-size: 80%;
|
||||
color: #555;
|
||||
display: block;
|
||||
padding-left: 4em;
|
||||
}
|
||||
|
||||
.proxy-timing {
|
||||
font-size: 80%;
|
||||
padding-left: 1rem;
|
||||
color: var(--color-link);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#recommended-proxy {
|
||||
display: grid;
|
||||
gap: 2rem;
|
||||
@media (min-width: 991px) {
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
}
|
||||
|
||||
> div {
|
||||
border: 1px #aaa solid;
|
||||
border-radius: 4px;
|
||||
padding: 1em;
|
||||
}
|
||||
|
||||
padding-bottom: 1em;
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
@import "parts/_pagination";
|
||||
@import "parts/_spinners";
|
||||
@import "parts/_variables";
|
||||
@import "parts/_darkmode";
|
||||
|
||||
body {
|
||||
color: var(--color-text);
|
||||
@@ -54,22 +55,6 @@ a.github-link {
|
||||
}
|
||||
}
|
||||
|
||||
#toggle-light-mode {
|
||||
width: 3rem;
|
||||
.icon-dark {
|
||||
display: none;
|
||||
}
|
||||
|
||||
&.dark {
|
||||
.icon-light {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.icon-dark {
|
||||
display: block;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#toggle-search {
|
||||
width: 2rem;
|
||||
|
||||
@@ -50,8 +50,7 @@
|
||||
#browser-steps .flex-wrapper {
|
||||
display: flex;
|
||||
flex-flow: row;
|
||||
height: 600px;
|
||||
/*@todo make this dynamic */ }
|
||||
height: 70vh; }
|
||||
|
||||
/* this is duplicate :( */
|
||||
#browsersteps-selector-wrapper {
|
||||
@@ -95,6 +94,37 @@ ul#requests-extra_proxies {
|
||||
ul#requests-extra_proxies table tr {
|
||||
display: inline; }
|
||||
|
||||
#request {
|
||||
/* Auto proxy scan/checker */ }
|
||||
#request label[for=proxy] {
|
||||
display: inline-block; }
|
||||
|
||||
body.proxy-check-active #request .proxy-status {
|
||||
width: 2em; }
|
||||
|
||||
body.proxy-check-active #request .proxy-check-details {
|
||||
font-size: 80%;
|
||||
color: #555;
|
||||
display: block;
|
||||
padding-left: 4em; }
|
||||
|
||||
body.proxy-check-active #request .proxy-timing {
|
||||
font-size: 80%;
|
||||
padding-left: 1rem;
|
||||
color: var(--color-link); }
|
||||
|
||||
#recommended-proxy {
|
||||
display: grid;
|
||||
gap: 2rem;
|
||||
padding-bottom: 1em; }
|
||||
@media (min-width: 991px) {
|
||||
#recommended-proxy {
|
||||
grid-template-columns: repeat(2, 1fr); } }
|
||||
#recommended-proxy > div {
|
||||
border: 1px #aaa solid;
|
||||
border-radius: 4px;
|
||||
padding: 1em; }
|
||||
|
||||
.pagination-page-info {
|
||||
color: #fff;
|
||||
font-size: 0.85rem;
|
||||
@@ -297,6 +327,18 @@ html[data-darkmode="true"] {
|
||||
html[data-darkmode="true"] .watch-table .unviewed.error {
|
||||
color: var(--color-watch-table-error); }
|
||||
|
||||
#toggle-light-mode {
|
||||
width: 3rem;
|
||||
/* default */ }
|
||||
#toggle-light-mode .icon-dark {
|
||||
display: none; }
|
||||
|
||||
html[data-darkmode="true"] #toggle-light-mode .icon-light {
|
||||
display: none; }
|
||||
|
||||
html[data-darkmode="true"] #toggle-light-mode .icon-dark {
|
||||
display: block; }
|
||||
|
||||
body {
|
||||
color: var(--color-text);
|
||||
background: var(--color-background-page); }
|
||||
@@ -331,15 +373,6 @@ a.github-link {
|
||||
a.github-link:hover {
|
||||
color: var(--color-icon-github-hover); }
|
||||
|
||||
#toggle-light-mode {
|
||||
width: 3rem; }
|
||||
#toggle-light-mode .icon-dark {
|
||||
display: none; }
|
||||
#toggle-light-mode.dark .icon-light {
|
||||
display: none; }
|
||||
#toggle-light-mode.dark .icon-dark {
|
||||
display: block; }
|
||||
|
||||
#toggle-search {
|
||||
width: 2rem; }
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
from distutils.util import strtobool
|
||||
|
||||
from flask import (
|
||||
flash
|
||||
)
|
||||
@@ -16,6 +18,11 @@ import threading
|
||||
import time
|
||||
import uuid as uuid_builder
|
||||
|
||||
# Because the server will run as a daemon and wont know the URL for notification links when firing off a notification
|
||||
BASE_URL_NOT_SET_TEXT = '("Base URL" not set - see settings - notifications)'
|
||||
|
||||
dictfilt = lambda x, y: dict([ (i,x[i]) for i in x if i in set(y) ])
|
||||
|
||||
# Is there an existing library to ensure some data store (JSON etc) is in sync with CRUD methods?
|
||||
# Open a github issue if you know something :)
|
||||
# https://stackoverflow.com/questions/6190468/how-to-trigger-function-on-value-change
|
||||
@@ -35,6 +42,7 @@ class ChangeDetectionStore:
|
||||
self.__data = App.model()
|
||||
self.datastore_path = datastore_path
|
||||
self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
|
||||
print(">>> Datastore path is ", self.json_store_path)
|
||||
self.needs_write = False
|
||||
self.start_time = time.time()
|
||||
self.stop_thread = False
|
||||
@@ -171,26 +179,21 @@ class ChangeDetectionStore:
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
# Re #152, Return env base_url if not overriden, @todo also prefer the proxy pass url
|
||||
env_base_url = os.getenv('BASE_URL','')
|
||||
if not self.__data['settings']['application']['base_url']:
|
||||
self.__data['settings']['application']['base_url'] = env_base_url.strip('" ')
|
||||
# Re #152, Return env base_url if not overriden
|
||||
# Re #148 - Some people have just {{ base_url }} in the body or title, but this may break some notification services
|
||||
# like 'Join', so it's always best to atleast set something obvious so that they are not broken.
|
||||
|
||||
return self.__data
|
||||
active_base_url = BASE_URL_NOT_SET_TEXT
|
||||
if self.__data['settings']['application'].get('base_url'):
|
||||
active_base_url = self.__data['settings']['application'].get('base_url')
|
||||
elif os.getenv('BASE_URL'):
|
||||
active_base_url = os.getenv('BASE_URL')
|
||||
|
||||
def get_all_tags(self):
|
||||
tags = []
|
||||
for uuid, watch in self.data['watching'].items():
|
||||
if watch['tag'] is None:
|
||||
continue
|
||||
# Support for comma separated list of tags.
|
||||
for tag in watch['tag'].split(','):
|
||||
tag = tag.strip()
|
||||
if tag not in tags:
|
||||
tags.append(tag)
|
||||
|
||||
tags.sort()
|
||||
return tags
|
||||
# I looked at various ways todo the following, but in the end just copying the dict seemed simplest/most reliable
|
||||
# even given the memory tradeoff - if you know a better way.. maybe return d|self.__data.. or something
|
||||
d = self.__data
|
||||
d['settings']['application']['active_base_url'] = active_base_url.strip('" ')
|
||||
return d
|
||||
|
||||
# Delete a single watch by UUID
|
||||
def delete(self, uuid):
|
||||
@@ -204,22 +207,22 @@ class ChangeDetectionStore:
|
||||
# GitHub #30 also delete history records
|
||||
for uuid in self.data['watching']:
|
||||
path = pathlib.Path(os.path.join(self.datastore_path, uuid))
|
||||
shutil.rmtree(path)
|
||||
self.needs_write_urgent = True
|
||||
if os.path.exists(path):
|
||||
shutil.rmtree(path)
|
||||
|
||||
else:
|
||||
path = pathlib.Path(os.path.join(self.datastore_path, uuid))
|
||||
shutil.rmtree(path)
|
||||
if os.path.exists(path):
|
||||
shutil.rmtree(path)
|
||||
del self.data['watching'][uuid]
|
||||
|
||||
self.needs_write_urgent = True
|
||||
self.needs_write_urgent = True
|
||||
|
||||
# Clone a watch by UUID
|
||||
def clone(self, uuid):
|
||||
url = self.data['watching'][uuid]['url']
|
||||
tag = self.data['watching'][uuid]['tag']
|
||||
url = self.data['watching'][uuid].get('url')
|
||||
extras = self.data['watching'][uuid]
|
||||
new_uuid = self.add_watch(url=url, tag=tag, extras=extras)
|
||||
new_uuid = self.add_watch(url=url, extras=extras)
|
||||
return new_uuid
|
||||
|
||||
def url_exists(self, url):
|
||||
@@ -254,16 +257,14 @@ class ChangeDetectionStore:
|
||||
|
||||
self.needs_write_urgent = True
|
||||
|
||||
def add_watch(self, url, tag="", extras=None, write_to_disk_now=True):
|
||||
def add_watch(self, url, tag='', extras=None, tag_uuids=None, write_to_disk_now=True):
|
||||
|
||||
if extras is None:
|
||||
extras = {}
|
||||
# should always be str
|
||||
if tag is None or not tag:
|
||||
tag = ''
|
||||
|
||||
# Incase these are copied across, assume it's a reference and deepcopy()
|
||||
apply_extras = deepcopy(extras)
|
||||
apply_extras['tags'] = [] if not apply_extras.get('tags') else apply_extras.get('tags')
|
||||
|
||||
# Was it a share link? try to fetch the data
|
||||
if (url.startswith("https://changedetection.io/share/")):
|
||||
@@ -290,6 +291,7 @@ class ChangeDetectionStore:
|
||||
'processor',
|
||||
'subtractive_selectors',
|
||||
'tag',
|
||||
'tags',
|
||||
'text_should_not_be_present',
|
||||
'title',
|
||||
'trigger_text',
|
||||
@@ -312,25 +314,39 @@ class ChangeDetectionStore:
|
||||
flash('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX', 'error')
|
||||
return None
|
||||
|
||||
with self.lock:
|
||||
# #Re 569
|
||||
new_watch = Watch.model(datastore_path=self.datastore_path, default={
|
||||
'url': url,
|
||||
'tag': tag,
|
||||
'date_created': int(time.time())
|
||||
})
|
||||
if tag and type(tag) == str:
|
||||
# Then it's probably a string of the actual tag by name, split and add it
|
||||
for t in tag.split(','):
|
||||
# for each stripped tag, add tag as UUID
|
||||
for a_t in t.split(','):
|
||||
tag_uuid = self.add_tag(a_t)
|
||||
apply_extras['tags'].append(tag_uuid)
|
||||
|
||||
new_uuid = new_watch['uuid']
|
||||
logging.debug("Added URL {} - {}".format(url, new_uuid))
|
||||
# Or if UUIDs given directly
|
||||
if tag_uuids:
|
||||
apply_extras['tags'] = list(set(apply_extras['tags'] + tag_uuids))
|
||||
|
||||
for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
|
||||
if k in apply_extras:
|
||||
del apply_extras[k]
|
||||
# Make any uuids unique
|
||||
if apply_extras.get('tags'):
|
||||
apply_extras['tags'] = list(set(apply_extras.get('tags')))
|
||||
|
||||
new_watch.update(apply_extras)
|
||||
self.__data['watching'][new_uuid] = new_watch
|
||||
new_watch = Watch.model(datastore_path=self.datastore_path, url=url)
|
||||
|
||||
new_uuid = new_watch.get('uuid')
|
||||
|
||||
logging.debug("Added URL {} - {}".format(url, new_uuid))
|
||||
|
||||
for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
|
||||
if k in apply_extras:
|
||||
del apply_extras[k]
|
||||
|
||||
if not apply_extras.get('date_created'):
|
||||
apply_extras['date_created'] = int(time.time())
|
||||
|
||||
new_watch.update(apply_extras)
|
||||
new_watch.ensure_data_dir_exists()
|
||||
self.__data['watching'][new_uuid] = new_watch
|
||||
|
||||
self.__data['watching'][new_uuid].ensure_data_dir_exists()
|
||||
|
||||
if write_to_disk_now:
|
||||
self.sync_to_json()
|
||||
@@ -470,6 +486,8 @@ class ChangeDetectionStore:
|
||||
k = "ui-" + str(i) + proxy.get('proxy_name')
|
||||
proxy_list[k] = {'label': proxy.get('proxy_name'), 'url': proxy.get('proxy_url')}
|
||||
|
||||
if proxy_list and strtobool(os.getenv('ENABLE_NO_PROXY_OPTION', 'True')):
|
||||
proxy_list["no-proxy"] = {'label': "No proxy", 'url': ''}
|
||||
|
||||
return proxy_list if len(proxy_list) else None
|
||||
|
||||
@@ -487,6 +505,9 @@ class ChangeDetectionStore:
|
||||
# If it's a valid one
|
||||
watch = self.data['watching'].get(uuid)
|
||||
|
||||
if strtobool(os.getenv('ENABLE_NO_PROXY_OPTION', 'True')) and watch.get('proxy') == "no-proxy":
|
||||
return None
|
||||
|
||||
if watch.get('proxy') and watch.get('proxy') in list(self.proxy_list.keys()):
|
||||
return watch.get('proxy')
|
||||
|
||||
@@ -510,10 +531,19 @@ class ChangeDetectionStore:
|
||||
filepath = os.path.join(self.datastore_path, 'headers.txt')
|
||||
return os.path.isfile(filepath)
|
||||
|
||||
def get_all_headers(self):
|
||||
def get_all_base_headers(self):
|
||||
from .model.App import parse_headers_from_text_file
|
||||
headers = copy(self.data['settings'].get('headers', {}))
|
||||
headers = {}
|
||||
# Global app settings
|
||||
headers.update(self.data['settings'].get('headers', {}))
|
||||
|
||||
return headers
|
||||
|
||||
def get_all_headers_in_textfile_for_watch(self, uuid):
|
||||
from .model.App import parse_headers_from_text_file
|
||||
headers = {}
|
||||
|
||||
# Global in /datastore/headers.txt
|
||||
filepath = os.path.join(self.datastore_path, 'headers.txt')
|
||||
try:
|
||||
if os.path.isfile(filepath):
|
||||
@@ -521,8 +551,79 @@ class ChangeDetectionStore:
|
||||
except Exception as e:
|
||||
print(f"ERROR reading headers.txt at {filepath}", str(e))
|
||||
|
||||
watch = self.data['watching'].get(uuid)
|
||||
if watch:
|
||||
|
||||
# In /datastore/xyz-xyz/headers.txt
|
||||
filepath = os.path.join(watch.watch_data_dir, 'headers.txt')
|
||||
try:
|
||||
if os.path.isfile(filepath):
|
||||
headers.update(parse_headers_from_text_file(filepath))
|
||||
except Exception as e:
|
||||
print(f"ERROR reading headers.txt at {filepath}", str(e))
|
||||
|
||||
# In /datastore/tag-name.txt
|
||||
tags = self.get_all_tags_for_watch(uuid=uuid)
|
||||
for tag_uuid, tag in tags.items():
|
||||
fname = "headers-"+re.sub(r'[\W_]', '', tag.get('title')).lower().strip() + ".txt"
|
||||
filepath = os.path.join(self.datastore_path, fname)
|
||||
try:
|
||||
if os.path.isfile(filepath):
|
||||
headers.update(parse_headers_from_text_file(filepath))
|
||||
except Exception as e:
|
||||
print(f"ERROR reading headers.txt at {filepath}", str(e))
|
||||
|
||||
return headers
|
||||
|
||||
def get_tag_overrides_for_watch(self, uuid, attr):
|
||||
tags = self.get_all_tags_for_watch(uuid=uuid)
|
||||
ret = []
|
||||
|
||||
if tags:
|
||||
for tag_uuid, tag in tags.items():
|
||||
if attr in tag and tag[attr]:
|
||||
ret=[*ret, *tag[attr]]
|
||||
|
||||
return ret
|
||||
|
||||
def add_tag(self, name):
|
||||
# If name exists, return that
|
||||
n = name.strip().lower()
|
||||
print (f">>> Adding new tag - '{n}'")
|
||||
if not n:
|
||||
return False
|
||||
|
||||
for uuid, tag in self.__data['settings']['application'].get('tags', {}).items():
|
||||
if n == tag.get('title', '').lower().strip():
|
||||
print (f">>> Tag {name} already exists")
|
||||
return uuid
|
||||
|
||||
# Eventually almost everything todo with a watch will apply as a Tag
|
||||
# So we use the same model as a Watch
|
||||
with self.lock:
|
||||
new_tag = Watch.model(datastore_path=self.datastore_path, default={
|
||||
'title': name.strip(),
|
||||
'date_created': int(time.time())
|
||||
})
|
||||
|
||||
new_uuid = new_tag.get('uuid')
|
||||
|
||||
self.__data['settings']['application']['tags'][new_uuid] = new_tag
|
||||
|
||||
return new_uuid
|
||||
|
||||
def get_all_tags_for_watch(self, uuid):
|
||||
"""This should be in Watch model but Watch doesn't have access to datastore, not sure how to solve that yet"""
|
||||
watch = self.data['watching'].get(uuid)
|
||||
|
||||
# Should return a dict of full tag info linked by UUID
|
||||
if watch:
|
||||
return dictfilt(self.__data['settings']['application']['tags'], watch.get('tags', []))
|
||||
|
||||
return {}
|
||||
|
||||
def tag_exists_by_name(self, tag_name):
|
||||
return any(v.get('title', '').lower() == tag_name.lower() for k, v in self.__data['settings']['application']['tags'].items())
|
||||
|
||||
# Run all updates
|
||||
# IMPORTANT - Each update could be run even when they have a new install and the schema is correct
|
||||
@@ -700,12 +801,24 @@ class ChangeDetectionStore:
|
||||
continue
|
||||
return
|
||||
|
||||
# We don't know when the date_created was in the past until now, so just add an index number for now.
|
||||
def update_11(self):
|
||||
# Create tag objects and their references from existing tag text
|
||||
def update_12(self):
|
||||
i = 0
|
||||
for uuid, watch in self.data['watching'].items():
|
||||
# Split out and convert old tag string
|
||||
tag = watch.get('tag')
|
||||
if tag:
|
||||
tag_uuids = []
|
||||
for t in tag.split(','):
|
||||
tag_uuids.append(self.add_tag(name=t))
|
||||
|
||||
self.data['watching'][uuid]['tags'] = tag_uuids
|
||||
|
||||
# #1775 - Update 11 did not update the records correctly when adding 'date_created' values for sorting
|
||||
def update_13(self):
|
||||
i = 0
|
||||
for uuid, watch in self.data['watching'].items():
|
||||
if not watch.get('date_created'):
|
||||
watch['date_created'] = i
|
||||
self.data['watching'][uuid]['date_created'] = i
|
||||
i+=1
|
||||
return
|
||||
|
||||
return
|
||||
@@ -13,9 +13,9 @@
|
||||
<div class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Use <a target=_new href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.</li>
|
||||
<li><code>discord://</code> only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
|
||||
<li><code>tgram://</code> bots cant send messages to other bots, so you should specify chat ID of non-bot user.</li>
|
||||
<li><code>tgram://</code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li>
|
||||
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_discord">discord://</a></code> (or <code>https://discord.com/api/webhooks...</code>)) </code> only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
|
||||
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> bots can't send messages to other bots, so you should specify chat ID of non-bot user.</li>
|
||||
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li>
|
||||
<li><code>gets://</code>, <code>posts://</code>, <code>puts://</code>, <code>deletes://</code> for direct API calls (or omit the "<code>s</code>" for non-SSL ie <code>get://</code>)</li>
|
||||
<li>Accepts the <code>{{ '{{token}}' }}</code> placeholders listed below</li>
|
||||
</ul>
|
||||
@@ -35,18 +35,14 @@
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_body , rows=5, class="notification-body", placeholder=settings_application['notification_body']) }}
|
||||
<span class="pure-form-message-inline">Body for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
<!-- unsure -->
|
||||
{{ render_field(form.notification_format , class="notification-format") }}
|
||||
<span class="pure-form-message-inline">Format for all notifications</span>
|
||||
<span class="pure-form-message-inline">Body for all notifications ‐ You can use <a target="_new" href="https://jinja.palletsprojects.com/en/3.0.x/templates/">Jinja2</a> templating in the notification title, body and URL, and tokens from below.
|
||||
</span>
|
||||
|
||||
</div>
|
||||
<div class="pure-controls">
|
||||
<p class="pure-form-message-inline">
|
||||
You can use <a target="_new" href="https://jinja.palletsprojects.com/en/3.0.x/templates/">Jinja2</a> templating in the notification title, body and URL.
|
||||
</p>
|
||||
|
||||
<div id="notification-token-toggle" class="pure-button button-tag button-xsmall">Show token/placeholders</div>
|
||||
</div>
|
||||
<div class="pure-controls" style="display: none;" id="notification-tokens-info">
|
||||
<table class="pure-table" id="token-table">
|
||||
<thead>
|
||||
<tr>
|
||||
@@ -99,9 +95,13 @@
|
||||
<td><code>{{ '{{diff_full}}' }}</code></td>
|
||||
<td>The diff output - full difference output</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{{ '{{diff_patch}}' }}</code></td>
|
||||
<td>The diff output - patch in unified format</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{{ '{{current_snapshot}}' }}</code></td>
|
||||
<td>The current snapshot value, useful when combined with JSON or CSS filters
|
||||
<td>The current snapshot text contents value, useful when combined with JSON or CSS filters
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
@@ -111,12 +111,15 @@
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="pure-form-message-inline">
|
||||
<br>
|
||||
URLs generated by changedetection.io (such as <code>{{ '{{diff_url}}' }}</code>) require the <code>BASE_URL</code> environment variable set.<br>
|
||||
Your <code>BASE_URL</code> var is currently "{{settings_application['current_base_url']}}"
|
||||
<br>
|
||||
Warning: Contents of <code>{{ '{{diff}}' }}</code>, <code>{{ '{{diff_removed}}' }}</code>, and <code>{{ '{{diff_added}}' }}</code> depend on how the difference algorithm perceives the change. For example, an addition or removal could be perceived as a change in some cases. <a target="_new" href="https://github.com/dgtlmoon/changedetection.io/wiki/Using-the-%7B%7Bdiff%7D%7D,-%7B%7Bdiff_added%7D%7D,-and-%7B%7Bdiff_removed%7D%7D-notification-tokens">More Here</a> <br>
|
||||
<p>
|
||||
Warning: Contents of <code>{{ '{{diff}}' }}</code>, <code>{{ '{{diff_removed}}' }}</code>, and <code>{{ '{{diff_added}}' }}</code> depend on how the difference algorithm perceives the change. <br>
|
||||
For example, an addition or removal could be perceived as a change in some cases. <a target="_new" href="https://github.com/dgtlmoon/changedetection.io/wiki/Using-the-%7B%7Bdiff%7D%7D,-%7B%7Bdiff_added%7D%7D,-and-%7B%7Bdiff_removed%7D%7D-notification-tokens">More Here</a> <br>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_format , class="notification-format") }}
|
||||
<span class="pure-form-message-inline">Format for all notifications</span>
|
||||
</div>
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
{% macro render_field(field) %}
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
|
||||
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
||||
{% if field.errors %}
|
||||
<ul class=errors>
|
||||
{% for error in field.errors %}
|
||||
@@ -25,18 +24,6 @@
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{% macro render_field(field) %}
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
||||
{% if field.errors %}
|
||||
<ul class=errors>
|
||||
{% for error in field.errors %}
|
||||
<li>{{ error }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{% macro render_simple_field(field) %}
|
||||
<span class="label {% if field.errors %}error{% endif %}">{{ field.label }}</span>
|
||||
|
||||
@@ -37,7 +37,7 @@
|
||||
<div class="header">
|
||||
<div class="home-menu pure-menu pure-menu-horizontal pure-menu-fixed" id="nav-menu">
|
||||
{% if has_password and not current_user.is_authenticated %}
|
||||
<a class="pure-menu-heading" href="https://github.com/dgtlmoon/changedetection.io" rel="noopener">
|
||||
<a class="pure-menu-heading" href="https://changedetection.io" rel="noopener">
|
||||
<strong>Change</strong>Detection.io</a>
|
||||
{% else %}
|
||||
<a class="pure-menu-heading" href="{{url_for('index')}}">
|
||||
@@ -49,7 +49,7 @@
|
||||
{% else %}
|
||||
{% if new_version_available and not(has_password and not current_user.is_authenticated) %}
|
||||
<span id="new-version-text" class="pure-menu-heading">
|
||||
<a href="https://github.com/dgtlmoon/changedetection.io">A new version is available</a>
|
||||
<a href="https://changedetection.io">A new version is available</a>
|
||||
</span>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
@@ -58,6 +58,9 @@
|
||||
{% if current_user.is_authenticated or not has_password %}
|
||||
{% if not
|
||||
current_diff_url %}
|
||||
<li class="pure-menu-item">
|
||||
<a href="{{ url_for('tags.tags_overview_page')}}" class="pure-menu-link">GROUPS</a>
|
||||
</li>
|
||||
<li class="pure-menu-item">
|
||||
<a href="{{ url_for('settings_page')}}" class="pure-menu-link">SETTINGS</a>
|
||||
</li>
|
||||
@@ -74,7 +77,7 @@
|
||||
{% endif %}
|
||||
{% else %}
|
||||
<li class="pure-menu-item">
|
||||
<a class="pure-menu-link" href="https://github.com/dgtlmoon/changedetection.io">Website Change Detection and Notification.</a>
|
||||
<a class="pure-menu-link" href="https://changedetection.io">Website Change Detection and Notification.</a>
|
||||
</li>
|
||||
{% endif %}
|
||||
{% if current_user.is_authenticated %}
|
||||
@@ -86,17 +89,14 @@
|
||||
<!-- We use GET here so it offers people a chance to set bookmarks etc -->
|
||||
<form name="searchForm" action="" method="GET">
|
||||
<input id="search-q" class="" name="q" placeholder="URL or Title {% if active_tag %}in '{{ active_tag }}'{% endif %}" required="" type="text" value="">
|
||||
<input name="tag" type="hidden" value="{% if active_tag %}{{active_tag}}{% endif %}">
|
||||
<input name="tags" type="hidden" value="{% if active_tag %}{{active_tag}}{% endif %}">
|
||||
<button class="toggle-button " id="toggle-search" type="button" title="Search, or Use Alt+S Key" >
|
||||
{% include "svgs/search-icon.svg" %}
|
||||
</button>
|
||||
</form>
|
||||
</li>
|
||||
<li class="pure-menu-item">
|
||||
{% if dark_mode %}
|
||||
{% set darkClass = 'dark' %}
|
||||
{% endif %}
|
||||
<button class="toggle-button {{darkClass}}" id ="toggle-light-mode" type="button" title="Toggle Light/Dark Mode">
|
||||
<button class="toggle-button" id ="toggle-light-mode" type="button" title="Toggle Light/Dark Mode">
|
||||
<span class="visually-hidden">Toggle light/dark mode</span>
|
||||
<span class="icon-light">
|
||||
{% include "svgs/light-mode-toggle-icon.svg" %}
|
||||
|
||||
@@ -6,6 +6,9 @@
|
||||
{% if last_error_screenshot %}
|
||||
const error_screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid, error_screenshot=1) }}";
|
||||
{% endif %}
|
||||
|
||||
const highlight_submit_ignore_url="{{url_for('highlight_submit_ignore_url', uuid=uuid)}}";
|
||||
|
||||
</script>
|
||||
<script src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script>
|
||||
|
||||
@@ -76,7 +79,7 @@
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="text">
|
||||
<div class="tip">Pro-tip: Use <strong>show current snapshot</strong> tab to visualise what will be ignored.</div>
|
||||
<div class="tip">Pro-tip: Use <strong>show current snapshot</strong> tab to visualise what will be ignored, highlight text to add to ignore filters</div>
|
||||
|
||||
{% if password_enabled_and_share_is_off %}
|
||||
<div class="tip">Pro-tip: You can enable <strong>"share access when password is enabled"</strong> from settings</div>
|
||||
@@ -91,7 +94,7 @@
|
||||
<td id="a" style="display: none;">{{previous}}</td>
|
||||
<td id="b" style="display: none;">{{newest}}</td>
|
||||
<td id="diff-col">
|
||||
<span id="result"></span>
|
||||
<span id="result" class="highlightable-filter"></span>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
|
||||
@@ -4,18 +4,19 @@
{% from '_common_fields.jinja' import render_common_settings_form %}
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
<script>
const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}";
const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};

{% if emailprefix %}
const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
{% endif %}

const browser_steps_config=JSON.parse('{{ browser_steps_config|tojson }}');
const browser_steps_start_url="{{url_for('browser_steps.browsersteps_start_session', uuid=uuid)}}";
const browser_steps_sync_url="{{url_for('browser_steps.browsersteps_ui_update', uuid=uuid)}}";
{% if emailprefix %}
const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
{% endif %}
const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};
const recheck_proxy_start_url="{{url_for('check_proxies.start_check', uuid=uuid)}}";
const proxy_recheck_status_url="{{url_for('check_proxies.get_recheck_status', uuid=uuid)}}";
const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}";

</script>

@@ -27,6 +28,8 @@
<script src="{{url_for('static_content', group='js', filename='browser-steps.js')}}" defer></script>
{% endif %}

<script src="{{url_for('static_content', group='js', filename='recheck-proxy.js')}}" defer></script>

<div class="edit-form monospaced-textarea">

<div class="tabs collapsable">

@@ -75,7 +78,7 @@
{{ render_field(form.title, class="m-d") }}
</div>
<div class="pure-control-group">
{{ render_field(form.tag) }}
{{ render_field(form.tags) }}
<span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span>
</div>
<div class="pure-control-group">

@@ -111,7 +114,8 @@
</div>
{% if form.proxy %}
<div class="pure-control-group inline-radio">
{{ render_field(form.proxy, class="fetch-backend-proxy") }}
<div>{{ form.proxy.label }} <a href="" id="check-all-proxies" class="pure-button button-secondary button-xsmall" >Check/Scan all</a></div>
<div>{{ form.proxy(class="fetch-backend-proxy") }}</div>
<span class="pure-form-message-inline">
Choose a proxy for this watch
</span>

@@ -374,15 +378,16 @@ Unavailable") }}
{{ render_field(form.extract_text, rows=5, placeholder="\d+ online") }}
<span class="pure-form-message-inline">
<ul>
<li>Extracts text in the final output (line by line) after other filters using regular expressions;
<li>Extracts text in the final output (line by line) after other filters using regular expressions or string match;
<ul>
<li>Regular expression ‐ example <code>/reports.+?2022/i</code></li>
<li>Don't forget to consider the white-space at the start of a line <code>/.+?reports.+?2022/i</code></li>
<li>Use <code>//(?aiLmsux))</code> type flags (more <a href="https://docs.python.org/3/library/re.html#index-15">information here</a>)<br></li>
<li>Keyword example ‐ example <code>Out of stock</code></li>
<li>Use groups to extract just that text ‐ example <code>/reports.+?(\d+)/i</code> returns a list of years only</li>
</ul>
</li>
<li>One line per regular-expression/ string match</li>
<li>One line per regular-expression/string match</li>
</ul>
</span>
</div>
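To make the help text above concrete, here is a minimal sketch (not the project's implementation) of the line-by-line behaviour it describes: entries written as /pattern/flags are treated as regular expressions, anything else as a plain keyword match, and a capture group returns just the captured text.

import re

# Minimal sketch of the "extract text" rules described above; not the
# project's actual code.
def extract_lines(rules, text):
    results = []
    for rule in rules:
        m = re.fullmatch(r'/(.+)/([aiLmsux]*)', rule.strip())
        for line in text.splitlines():
            if m:
                flags = 0
                for f in m.group(2):
                    flags |= getattr(re, f.upper())   # e.g. 'i' -> re.I
                hit = re.search(m.group(1), line, flags)
                if hit:
                    # A capture group returns just that text, e.g. the year only
                    results.append(hit.group(1) if hit.groups() else hit.group(0))
            elif rule in line:
                results.append(line)
    return results

print(extract_lines([r'/reports.+?(\d+)/i', 'Out of stock'],
                    "Quarterly reports for 2022\nOut of stock right now"))
# -> ['2022', 'Out of stock right now']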
@@ -6,6 +6,7 @@
{% if last_error_screenshot %}
const error_screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid, error_screenshot=1) }}";
{% endif %}
const highlight_submit_ignore_url="{{url_for('highlight_submit_ignore_url', uuid=uuid)}}";
</script>
<script src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script>

@@ -20,7 +21,7 @@
{% endif %}
</ul>
</div>

<form><input type="hidden" name="csrf_token" value="{{ csrf_token() }}"></form>
<div id="diff-ui">
<div class="tab-pane-inner" id="error-text">
<div class="snapshot-age error">{{watch.error_text_ctime|format_seconds_ago}} seconds ago</div>

@@ -36,11 +37,12 @@

<div class="tab-pane-inner" id="text">
<div class="snapshot-age">{{watch.snapshot_text_ctime|format_timestamp_timeago}}</div>
<span class="ignored">Grey lines are ignored</span> <span class="triggered">Blue lines are triggers</span>
<span class="ignored">Grey lines are ignored</span> <span class="triggered">Blue lines are triggers</span> <span class="tip"><strong>Pro-tip</strong>: Highlight text to add to ignore filters</span>

<table>
<tbody>
<tr>
<td id="diff-col">
<td id="diff-col" class="highlightable-filter">
{% for row in content %}
<div class="{{row.classes}}">{{row.line}}</div>
{% endfor %}
@@ -62,14 +62,6 @@
<span class="pure-form-message-inline">Allow access to view watch diff page when password is enabled (Good for sharing the diff page)
</span>
</div>
<div class="pure-control-group">
{{ render_field(form.application.form.base_url, placeholder="http://yoursite.com:5000/",
class="m-d") }}
<span class="pure-form-message-inline">
Base URL used for the <code>{{ '{{ base_url }}' }}</code> token in notifications and RSS links.<br>Default value is the ENV var 'BASE_URL' (Currently "{{settings_application['current_base_url']}}"),
<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Configurable-BASE_URL-setting">read more here</a>.
</span>
</div>
<div class="pure-control-group">
{{ render_field(form.application.form.pager_size) }}
<span class="pure-form-message-inline">Number of items per page in the watch overview list, 0 to disable.</span>

@@ -100,6 +92,13 @@
{{ render_common_settings_form(form.application.form, emailprefix, settings_application) }}
</div>
</fieldset>
<div class="pure-control-group" id="notification-base-url">
{{ render_field(form.application.form.base_url, class="m-d") }}
<span class="pure-form-message-inline">
Base URL used for the <code>{{ '{{ base_url }}' }}</code> token in notification links.<br>
Default value is the system environment variable '<code>BASE_URL</code>' - <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Configurable-BASE_URL-setting">read more here</a>.
</span>
</div>
</div>
<div class="tab-pane-inner" id="fetching">
|
||||
@@ -181,20 +180,57 @@ nav
|
||||
</div>
|
||||
</div>
|
||||
<div class="tab-pane-inner" id="proxies">
|
||||
<div id="recommended-proxy">
|
||||
<div>
|
||||
<img style="height: 2em;" src="{{url_for('static_content', group='images', filename='brightdata.svg')}}" alt="BrightData Proxy Provider">
|
||||
<p>BrightData offer world-class proxy services, "Data Center" proxies are a very affordable way to proxy your requests, whilst <strong><a href="https://brightdata.grsm.io/n0r16zf7eivq">WebUnlocker</a></strong> can help solve most CAPTCHAs.</p>
|
||||
<p>
|
||||
BrightData offer many <a href="https://brightdata.com/proxy-types" target="new">many different types of proxies</a>, it is worth reading about what is best for your use-case.
|
||||
</p>
|
||||
|
||||
<p><strong>Tip</strong>: You can connect to websites using <a href="https://brightdata.grsm.io/n0r16zf7eivq">BrightData</a> proxies, their service <strong>WebUnlocker</strong> will solve most CAPTCHAs, whilst their <strong>Residential Proxies</strong> may help to avoid CAPTCHA altogether. </p>
|
||||
<p>It may be easier to try <strong>WebUnlocker</strong> first, WebUnlocker also supports country selection.</p>
|
||||
<p>
|
||||
When you have <a href="https://brightdata.grsm.io/n0r16zf7eivq">registered</a>, enabled the required services, visit the <A href="https://brightdata.com/cp/api_example?">API example page</A>, then select <strong>Python</strong>, set the country you wish to use, then copy+paste the access Proxy URL into the "Extra Proxies" boxes below.<br>
|
||||
</p>
|
||||
<p>
|
||||
The Proxy URL with BrightData should start with <code>http://brd-customer...</code>
|
||||
</p>
|
||||
<p>When you sign up using <a href="https://brightdata.grsm.io/n0r16zf7eivq">https://brightdata.grsm.io/n0r16zf7eivq</a> BrightData will match any first deposit up to $150</p>
|
||||
</div>
|
||||
<div>
|
||||
<img style="height: 2em;"
|
||||
src="{{url_for('static_content', group='images', filename='oxylabs.svg')}}"
|
||||
alt="Oxylabs Proxy Provider">
|
||||
<p>
|
||||
Collect public data at scale with industry-leading web scraping solutions and the world’s
|
||||
largest ethical proxy network.
|
||||
</p>
|
||||
<p>
|
||||
Oxylabs also provide a <a href="https://oxylabs.io/products/web-unblocker"><strong>WebUnlocker</strong></a>
|
||||
proxy that bypasses sophisticated anti-bot systems, so you don’t have to.<br>
|
||||
</p>
|
||||
<p>
|
||||
Serve over <a href="https://oxylabs.io/location-proxy">195 countries</a>, providing <a
|
||||
href="https://oxylabs.io/products/residential-proxy-pool">Residential</a>, <a
|
||||
href="https://oxylabs.io/products/mobile-proxies">Mobile</a> and <a
|
||||
href="https://oxylabs.io/products/rotating-isp-proxies">ISP proxies</a> and much more.
|
||||
</p>
|
||||
<p>
|
||||
Use the promo code <strong>boost35</strong> with this link <a href="https://oxylabs.go2cloud.org/SH2d">https://oxylabs.go2cloud.org/SH2d</a> for 35% off Residential, Mobile proxies, Web Unblocker, and Scraper APIs. Built-in proxies enable you to access data from all around the world and help overcome anti-bot solutions.
|
||||
|
||||
</p>
|
||||
|
||||
|
||||
</div>
|
||||
</div>
|
||||
<p>
|
||||
When you have <a href="https://brightdata.grsm.io/n0r16zf7eivq">registered</a>, enabled the required services, visit the <A href="https://brightdata.com/cp/api_example?">API example page</A>, then select <strong>Python</strong>, set the country you wish to use, then copy+paste the example URL below<br>
|
||||
The Proxy URL with BrightData should start with <code>http://brd-customer...</code>
|
||||
Your proxy provider may need to whitelist our IP of <code>204.15.192.195</code>
|
||||
</p>
|
||||
|
||||
<p>When you sign up using <a href="https://brightdata.grsm.io/n0r16zf7eivq">https://brightdata.grsm.io/n0r16zf7eivq</a> BrightData will match any first deposit up to $150</p>
|
||||
|
||||
<p><strong>Tip</strong>: "Residential" and "Mobile" proxy type can be more successfull than "Data Center" for blocked websites.
|
||||
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.requests.form.extra_proxies) }}
|
||||
<span class="pure-form-message-inline">"Name" will be used for selecting the proxy in the Watch Edit settings</span>
|
||||
{{ render_field(form.requests.form.extra_proxies) }}
|
||||
<span class="pure-form-message-inline">"Name" will be used for selecting the proxy in the Watch Edit settings</span>
|
||||
<span class="pure-form-message-inline">SOCKS5 proxies with authentication are only supported with 'plain requests' fetcher, for other fetchers you should whitelist the IP access instead</span>
|
||||
</div>
|
||||
</div>
|
||||
<div id="actions">
|
||||
|
||||
@@ -13,7 +13,7 @@
<div id="watch-add-wrapper-zone">
<div>
{{ render_simple_field(form.url, placeholder="https://...", required=true) }}
{{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="watch label / tag") }}
{{ render_simple_field(form.tags, value=tags[active_tag].title if active_tag else '', placeholder="watch label / tag") }}
</div>
<div>
{{ render_simple_field(form.watch_submit_button, title="Watch this URL!" ) }}

@@ -30,12 +30,14 @@

<form class="pure-form" action="{{ url_for('form_watch_list_checkbox_operations') }}" method="POST" id="watch-list-form">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
<input type="hidden" id="op_extradata" name="op_extradata" value="" >
<div id="checkbox-operations">
<button class="pure-button button-secondary button-xsmall" name="op" value="pause">Pause</button>
<button class="pure-button button-secondary button-xsmall" name="op" value="unpause">UnPause</button>
<button class="pure-button button-secondary button-xsmall" name="op" value="mute">Mute</button>
<button class="pure-button button-secondary button-xsmall" name="op" value="unmute">UnMute</button>
<button class="pure-button button-secondary button-xsmall" name="op" value="recheck">Recheck</button>
<button class="pure-button button-secondary button-xsmall" name="op" value="assign-tag" id="checkbox-assign-tag">Tag</button>
<button class="pure-button button-secondary button-xsmall" name="op" value="mark-viewed">Mark viewed</button>
<button class="pure-button button-secondary button-xsmall" name="op" value="notification-default">Use default notification</button>
<button class="pure-button button-secondary button-xsmall" style="background: #dd4242;" name="op" value="clear-history">Clear/reset history</button>

@@ -47,9 +49,9 @@
{% if search_q %}<div id="search-result-info">Searching "<strong><i>{{search_q}}</i></strong>"</div>{% endif %}
<div>
<a href="{{url_for('index')}}" class="pure-button button-tag {{'active' if not active_tag }}">All</a>
{% for tag in tags %}
{% for uuid, tag in tags.items() %}
{% if tag != "" %}
<a href="{{url_for('index', tag=tag) }}" class="pure-button button-tag {{'active' if active_tag == tag }}">{{ tag }}</a>
<a href="{{url_for('index', tag=uuid) }}" class="pure-button button-tag {{'active' if active_tag == uuid }}">{{ tag.title }}</a>
{% endif %}
{% endfor %}
</div>
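The hunks above change the tag listing from plain strings to tag objects keyed by UUID: the loop now yields (uuid, tag) pairs and renders tag.title, and the quick-add field reads tags[active_tag].title. The underlying data model is not shown in this compare; purely as a hypothetical illustration, the shape these templates assume is roughly a mapping of tag UUIDs to tag objects:

import uuid

# Hypothetical illustration only; the real tag objects live in the
# application's datastore and may carry more fields than shown here.
tags = {
    str(uuid.uuid4()): {"title": "cinema"},
    str(uuid.uuid4()): {"title": "restock"},
}

active_tag = next(iter(tags))        # watches and URLs now reference the tag's UUID
print(tags[active_tag]["title"])     # templates display tag.title rather than the raw string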
@@ -117,6 +119,9 @@
<a href="{{ url_for('settings_page', uuid=watch.uuid) }}#proxies">Try adding external proxies/locations</a>

{% endif %}
{% if 'empty result or contain only an image' in watch.last_error %}
<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Detecting-changes-in-images">more help here</a>.
{% endif %}
</div>
{% endif %}
{% if watch.last_notification_error is defined and watch.last_notification_error != False %}

@@ -143,9 +148,11 @@
</span>
{% endif %}

{% if not active_tag %}
<span class="watch-tag-list">{{ watch.tag}}</span>
{% endif %}

{% for watch_tag_uuid, watch_tag in datastore.get_all_tags_for_watch(watch['uuid']).items() %}
<span class="watch-tag-list">{{ watch_tag.title }}</span>
{% endfor %}

</td>
<td class="last-checked">{{watch|format_last_checked_time|safe}}</td>
<td class="last-changed">{% if watch.history_n >=2 and watch.last_changed >0 %}

@@ -178,7 +185,7 @@
{% endif %}
<li>
<a href="{{ url_for('form_watch_checknow', tag=active_tag) }}" class="pure-button button-tag ">Recheck
all {% if active_tag%}in "{{active_tag}}"{%endif%}</a>
all {% if active_tag%} in "{{tags[active_tag].title}}"{%endif%}</a>
</li>
<li>
<a href="{{ url_for('rss', tag=active_tag , token=app_rss_token)}}"><img alt="RSS Feed" id="feed-icon" src="{{url_for('static_content', group='images', filename='Generic_Feed-icon.svg')}}" height="15"></a>
@@ -28,8 +28,6 @@ def test_fetch_webdriver_content(client, live_server):
)

assert b"1 Imported" in res.data
time.sleep(3)

wait_for_all_checks(client)

@@ -2,12 +2,11 @@

import time
from flask import url_for
from ..util import live_server_setup
from ..util import live_server_setup, wait_for_all_checks


def test_preferred_proxy(client, live_server):
time.sleep(1)
live_server_setup(live_server)
time.sleep(1)
url = "http://chosen.changedetection.io"

res = client.post(

@@ -20,7 +19,7 @@ def test_preferred_proxy(client, live_server):

assert b"1 Imported" in res.data

time.sleep(2)
wait_for_all_checks(client)
res = client.post(
url_for("edit_page", uuid="first"),
data={

@@ -28,11 +27,11 @@ def test_preferred_proxy(client, live_server):
"fetch_backend": "html_requests",
"headers": "",
"proxy": "proxy-two",
"tag": "",
"tags": "",
"url": url,
},
follow_redirects=True
)
assert b"Updated watch." in res.data
time.sleep(2)
wait_for_all_checks(client)
# Now the request should appear in the second-squid logs
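Many of the test hunks in this compare replace fixed time.sleep() delays with wait_for_all_checks(client). The helper itself is not shown here; as a rough sketch only (assuming the index page shows a "Checking now" marker while watches are queued or being fetched), such a helper might look like this:

import time
from flask import url_for

# Hypothetical sketch; the real helper lives in changedetectionio/tests/util.py
# and may work differently. Polls the UI until no watch is still being checked.
def wait_for_all_checks(client, timeout=60):
    deadline = time.time() + timeout
    while time.time() < deadline:
        res = client.get(url_for("index"))
        if b'Checking now' not in res.data:
            return
        time.sleep(0.5)
    raise TimeoutError("Watches were still being checked after %d seconds" % timeout)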
changedetectionio/tests/proxy_list/test_noproxy.py (new file, 77 lines)
@@ -0,0 +1,77 @@
#!/usr/bin/python3

import time
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client


def test_noproxy_option(client, live_server):
    live_server_setup(live_server)
    # Run by run_proxy_tests.sh
    # Call this URL then scan the containers that it never went through them
    url = "http://noproxy.changedetection.io"

    # Should only be available when a proxy is setup
    res = client.get(
        url_for("edit_page", uuid="first", unpause_on_save=1))
    assert b'No proxy' not in res.data

    # Setup a proxy
    res = client.post(
        url_for("settings_page"),
        data={
            "requests-time_between_check-minutes": 180,
            "application-ignore_whitespace": "y",
            "application-fetch_backend": "html_requests",
            "requests-extra_proxies-0-proxy_name": "custom-one-proxy",
            "requests-extra_proxies-0-proxy_url": "http://test:awesome@squid-one:3128",
            "requests-extra_proxies-1-proxy_name": "custom-two-proxy",
            "requests-extra_proxies-1-proxy_url": "http://test:awesome@squid-two:3128",
            "requests-extra_proxies-2-proxy_name": "custom-proxy",
            "requests-extra_proxies-2-proxy_url": "http://test:awesome@squid-custom:3128",
        },
        follow_redirects=True
    )

    assert b"Settings updated." in res.data

    # Should be available as an option
    res = client.get(
        url_for("settings_page", unpause_on_save=1))
    assert b'No proxy' in res.data

    # This will add it paused
    res = client.post(
        url_for("form_quick_watch_add"),
        data={"url": url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
        follow_redirects=True
    )
    assert b"Watch added in Paused state, saving will unpause" in res.data
    uuid = extract_UUID_from_client(client)
    res = client.get(
        url_for("edit_page", uuid=uuid, unpause_on_save=1))
    assert b'No proxy' in res.data

    res = client.post(
        url_for("edit_page", uuid=uuid, unpause_on_save=1),
        data={
            "include_filters": "",
            "fetch_backend": "html_requests",
            "headers": "",
            "proxy": "no-proxy",
            "tags": "",
            "url": url,
        },
        follow_redirects=True
    )
    assert b"unpaused" in res.data
    wait_for_all_checks(client)
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)
    # Now the request should NOT appear in the second-squid logs (handled by the run_test_proxies.sh script)

    # Prove that it actually checked
    assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] != 0
@@ -0,0 +1,6 @@
{
    "socks5proxy": {
        "label": "socks5proxy",
        "url": "socks5://proxy_user123:proxy_pass123@socks5proxy:1080"
    }
}

@@ -0,0 +1,6 @@
{
    "socks5proxy": {
        "label": "socks5proxy",
        "url": "socks5://socks5proxy-noauth:1080"
    }
}
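These example proxy definitions pair with the settings note above that SOCKS5 with authentication is only supported by the plain requests fetcher. As a point of reference rather than code from this repository, fetching a page through such a proxy with the requests library would look roughly like this, assuming the requests[socks] extra (PySocks) is installed:

import requests  # requires: pip install requests[socks]

proxy_url = "socks5://proxy_user123:proxy_pass123@socks5proxy:1080"
# Use "socks5h://" instead if DNS resolution should also happen on the proxy side.
res = requests.get(
    "https://changedetection.io/CHANGELOG.txt",
    proxies={"http": proxy_url, "https": proxy_url},
    timeout=30,
)
print(res.status_code, len(res.text))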
changedetectionio/tests/proxy_socks5/test_socks5_proxy.py (new file, 63 lines)
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/python3
|
||||
import os
|
||||
import time
|
||||
from flask import url_for
|
||||
from changedetectionio.tests.util import live_server_setup, wait_for_all_checks
|
||||
|
||||
|
||||
def test_socks5(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
# Setup a proxy
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-ignore_whitespace": "y",
|
||||
"application-fetch_backend": "html_requests",
|
||||
# set in .github/workflows/test-only.yml
|
||||
"requests-extra_proxies-0-proxy_url": "socks5://proxy_user123:proxy_pass123@socks5proxy:1080",
|
||||
"requests-extra_proxies-0-proxy_name": "socks5proxy",
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
test_url = "https://changedetection.io/CHANGELOG.txt?socks-test-tag=" + os.getenv('SOCKSTEST', '')
|
||||
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Watch added in Paused state, saving will unpause" in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first", unpause_on_save=1),
|
||||
)
|
||||
# check the proxy is offered as expected
|
||||
assert b'ui-0socks5proxy' in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first", unpause_on_save=1),
|
||||
data={
|
||||
"include_filters": "",
|
||||
"fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
|
||||
"headers": "",
|
||||
"proxy": "ui-0socks5proxy",
|
||||
"tags": "",
|
||||
"url": test_url,
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"unpaused" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Should see the proper string
|
||||
assert "+0200:".encode('utf-8') in res.data
|
||||
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/python3
|
||||
import os
|
||||
import time
|
||||
from flask import url_for
|
||||
from changedetectionio.tests.util import live_server_setup, wait_for_all_checks
|
||||
|
||||
|
||||
# should be proxies.json mounted from run_proxy_tests.sh already
|
||||
# -v `pwd`/tests/proxy_socks5/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json
|
||||
def test_socks5_from_proxiesjson_file(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
test_url = "https://changedetection.io/CHANGELOG.txt?socks-test-tag=" + os.getenv('SOCKSTEST', '')
|
||||
|
||||
res = client.get(url_for("settings_page"))
|
||||
assert b'name="requests-proxy" type="radio" value="socks5proxy"' in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Watch added in Paused state, saving will unpause" in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first", unpause_on_save=1),
|
||||
)
|
||||
# check the proxy is offered as expected
|
||||
assert b'name="proxy" type="radio" value="socks5proxy"' in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first", unpause_on_save=1),
|
||||
data={
|
||||
"include_filters": "",
|
||||
"fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
|
||||
"headers": "",
|
||||
"proxy": "socks5proxy",
|
||||
"tags": "",
|
||||
"url": test_url,
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"unpaused" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Should see the proper string
|
||||
assert "+0200:".encode('utf-8') in res.data
|
||||
@@ -77,7 +77,7 @@ def test_restock_detection(client, live_server):

client.post(
url_for("form_quick_watch_add"),
data={"url": test_url, "tag": '', 'processor': 'restock_diff'},
data={"url": test_url, "tags": '', 'processor': 'restock_diff'},
follow_redirects=True
)

changedetectionio/tests/smtp/smtp-test-server.py (new executable file, 42 lines)
@@ -0,0 +1,42 @@
#!/usr/bin/python3
import smtpd
import asyncore

# Accept a SMTP message and offer a way to retrieve the last message via TCP Socket

last_received_message = b"Nothing"


class CustomSMTPServer(smtpd.SMTPServer):

    def process_message(self, peer, mailfrom, rcpttos, data, **kwargs):
        global last_received_message
        last_received_message = data
        print('Receiving message from:', peer)
        print('Message addressed from:', mailfrom)
        print('Message addressed to :', rcpttos)
        print('Message length :', len(data))
        print(data.decode('utf8'))
        return


# Just print out the last message received on plain TCP socket server
class EchoServer(asyncore.dispatcher):

    def __init__(self, host, port):
        asyncore.dispatcher.__init__(self)
        self.create_socket()
        self.set_reuse_addr()
        self.bind((host, port))
        self.listen(5)

    def handle_accepted(self, sock, addr):
        global last_received_message
        print('Incoming connection from %s' % repr(addr))
        sock.send(last_received_message)
        last_received_message = b''


server = CustomSMTPServer(('0.0.0.0', 11025), None)  # SMTP mail goes here
server2 = EchoServer('0.0.0.0', 11080)  # Echo back last message received
asyncore.loop()
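Worth noting for anyone reusing this helper: the smtpd and asyncore modules it relies on are deprecated and were removed in Python 3.12, so the script above only runs on older interpreters. A rough, untested equivalent using aiosmtpd (an assumption here; it is not part of this repository's test setup as shown) might look like:

# Sketch only: an aiosmtpd-based stand-in for the smtpd/asyncore server above.
# Assumes "pip install aiosmtpd"; the port numbers match the original script.
import socketserver
from aiosmtpd.controller import Controller

last_received_message = b"Nothing"

class KeepLastMessage:
    async def handle_DATA(self, server, session, envelope):
        global last_received_message
        last_received_message = envelope.content      # raw message bytes
        print('Receiving message from:', session.peer)
        return '250 Message accepted for delivery'

class EchoHandler(socketserver.BaseRequestHandler):
    def handle(self):
        # Reply with the last SMTP message, mirroring the asyncore EchoServer
        self.request.sendall(last_received_message)

controller = Controller(KeepLastMessage(), hostname='0.0.0.0', port=11025)
controller.start()
socketserver.TCPServer(('0.0.0.0', 11080), EchoHandler).serve_forever()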
changedetectionio/tests/smtp/test_notification_smtp.py (new file, 165 lines)
@@ -0,0 +1,165 @@
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import re
|
||||
from flask import url_for
|
||||
from changedetectionio.tests.util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup, \
|
||||
wait_for_all_checks, \
|
||||
set_longer_modified_response
|
||||
from changedetectionio.tests.util import extract_UUID_from_client
|
||||
import logging
|
||||
import base64
|
||||
|
||||
# NOTE - RELIES ON mailserver as hostname running, see github build recipes
|
||||
smtp_test_server = 'mailserver'
|
||||
|
||||
from changedetectionio.notification import (
|
||||
default_notification_body,
|
||||
default_notification_format,
|
||||
default_notification_title,
|
||||
valid_notification_formats,
|
||||
)
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
def get_last_message_from_smtp_server():
|
||||
import socket
|
||||
global smtp_test_server
|
||||
port = 11080 # socket server port number
|
||||
|
||||
client_socket = socket.socket() # instantiate
|
||||
client_socket.connect((smtp_test_server, port)) # connect to the server
|
||||
|
||||
data = client_socket.recv(50024).decode() # receive response
|
||||
client_socket.close() # close the connection
|
||||
return data
|
||||
|
||||
|
||||
# Requires running the test SMTP server
|
||||
|
||||
def test_check_notification_email_formats_default_HTML(client, live_server):
|
||||
# live_server_setup(live_server)
|
||||
set_original_response()
|
||||
|
||||
global smtp_test_server
|
||||
notification_url = f'mailto://changedetection@{smtp_test_server}:11025/?to=fff@home.com'
|
||||
|
||||
#####################
|
||||
# Set this up for when we remove the notification from the watch, it should fallback with these details
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={"application-notification_urls": notification_url,
|
||||
"application-notification_title": "fallback-title " + default_notification_title,
|
||||
"application-notification_body": "fallback-body<br> " + default_notification_body,
|
||||
"application-notification_format": 'HTML',
|
||||
"requests-time_between_check-minutes": 180,
|
||||
'application-fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
# Add a watch and trigger a HTTP POST
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tags": 'nice one'},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
wait_for_all_checks(client)
|
||||
set_longer_modified_response()
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
time.sleep(3)
|
||||
|
||||
msg = get_last_message_from_smtp_server()
|
||||
assert len(msg) >= 1
|
||||
|
||||
# The email should have two bodies, and the text/html part should be <br>
|
||||
assert 'Content-Type: text/plain' in msg
|
||||
assert '(added) So let\'s see what happens.\n' in msg # The plaintext part with \n
|
||||
assert 'Content-Type: text/html' in msg
|
||||
assert '(added) So let\'s see what happens.<br>' in msg # the html part
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
|
||||
def test_check_notification_email_formats_default_Text_override_HTML(client, live_server):
|
||||
# live_server_setup(live_server)
|
||||
|
||||
# HTML problems? see this
|
||||
# https://github.com/caronc/apprise/issues/633
|
||||
|
||||
set_original_response()
|
||||
global smtp_test_server
|
||||
notification_url = f'mailto://changedetection@{smtp_test_server}:11025/?to=fff@home.com'
|
||||
|
||||
#####################
|
||||
# Set this up for when we remove the notification from the watch, it should fallback with these details
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={"application-notification_urls": notification_url,
|
||||
"application-notification_title": "fallback-title " + default_notification_title,
|
||||
"application-notification_body": default_notification_body,
|
||||
"application-notification_format": 'Text',
|
||||
"requests-time_between_check-minutes": 180,
|
||||
'application-fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
# Add a watch and trigger a HTTP POST
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tags": 'nice one'},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
wait_for_all_checks(client)
|
||||
set_longer_modified_response()
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
time.sleep(3)
|
||||
msg = get_last_message_from_smtp_server()
|
||||
assert len(msg) >= 1
|
||||
# with open('/tmp/m.txt', 'w') as f:
|
||||
# f.write(msg)
|
||||
|
||||
# The email should not have two bodies, should be TEXT only
|
||||
|
||||
assert 'Content-Type: text/plain' in msg
|
||||
assert '(added) So let\'s see what happens.\n' in msg # The plaintext part with \n
|
||||
|
||||
set_original_response()
|
||||
# Now override as HTML format
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"notification_format": 'HTML',
|
||||
'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
time.sleep(3)
|
||||
msg = get_last_message_from_smtp_server()
|
||||
assert len(msg) >= 1
|
||||
|
||||
# The email should have two bodies, and the text/html part should be <br>
|
||||
assert 'Content-Type: text/plain' in msg
|
||||
assert '(removed) So let\'s see what happens.\n' in msg # The plaintext part with \n
|
||||
assert 'Content-Type: text/html' in msg
|
||||
assert '(removed) So let\'s see what happens.<br>' in msg # the html part
|
||||
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
@@ -45,6 +45,15 @@ def test_check_access_control(app, client, live_server):
|
||||
res = client.get(url_for("diff_history_page", uuid="first"))
|
||||
assert b'Random content' in res.data
|
||||
|
||||
# Check wrong password does not let us in
|
||||
res = c.post(
|
||||
url_for("login"),
|
||||
data={"password": "WRONG PASSWORD"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"LOG OUT" not in res.data
|
||||
assert b"Incorrect password" in res.data
|
||||
|
||||
|
||||
# Menu should not be available yet
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from changedetectionio import html_tools
|
||||
|
||||
|
||||
@@ -39,7 +39,6 @@ def test_setup(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
def test_check_removed_line_contains_trigger(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
@@ -54,7 +53,7 @@ def test_check_removed_line_contains_trigger(client, live_server):
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
@@ -67,20 +66,20 @@ def test_check_removed_line_contains_trigger(client, live_server):
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
set_original(excluding='Something irrelevant')
|
||||
|
||||
# A line thats not the trigger should not trigger anything
|
||||
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
assert b'1 watches queued for rechecking.' in res.data
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' not in res.data
|
||||
|
||||
# The trigger line is REMOVED, this should trigger
|
||||
set_original(excluding='The golden line')
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
@@ -89,14 +88,14 @@ def test_check_removed_line_contains_trigger(client, live_server):
|
||||
client.get(url_for("mark_all_viewed"), follow_redirects=True)
|
||||
set_original(excluding=None)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' not in res.data
|
||||
|
||||
# Remove it again, and we should get a trigger
|
||||
set_original(excluding='The golden line')
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
@@ -105,8 +104,7 @@ def test_check_removed_line_contains_trigger(client, live_server):
|
||||
|
||||
|
||||
def test_check_add_line_contains_trigger(client, live_server):
|
||||
|
||||
sleep_time_for_fetch_thread = 3
|
||||
#live_server_setup(live_server)
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
@@ -136,8 +134,7 @@ def test_check_add_line_contains_trigger(client, live_server):
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
wait_for_all_checks(client)
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
@@ -150,23 +147,25 @@ def test_check_add_line_contains_trigger(client, live_server):
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
set_original(excluding='Something irrelevant')
|
||||
|
||||
# A line thats not the trigger should not trigger anything
|
||||
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
assert b'1 watches queued for rechecking.' in res.data
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' not in res.data
|
||||
|
||||
# The trigger line is ADDED, this should trigger
|
||||
set_original(add_line='<p>Oh yes please</p>')
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
# Takes a moment for apprise to fire
|
||||
time.sleep(3)
|
||||
with open("test-datastore/notification.txt", 'r') as f:
|
||||
response= f.read()
|
||||
assert '-Oh yes please-' in response
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, extract_api_key_from_UI
|
||||
from .util import live_server_setup, extract_api_key_from_UI, wait_for_all_checks
|
||||
|
||||
import json
|
||||
import uuid
|
||||
@@ -57,6 +57,7 @@ def test_setup(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
def test_api_simple(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
|
||||
api_key = extract_api_key_from_UI(client)
|
||||
|
||||
@@ -86,7 +87,7 @@ def test_api_simple(client, live_server):
|
||||
watch_uuid = res.json.get('uuid')
|
||||
assert res.status_code == 201
|
||||
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Verify its in the list and that recheck worked
|
||||
res = client.get(
|
||||
@@ -107,7 +108,7 @@ def test_api_simple(client, live_server):
|
||||
)
|
||||
assert len(res.json) == 0
|
||||
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
set_modified_response()
|
||||
# Trigger recheck of all ?recheck_all=1
|
||||
@@ -115,7 +116,7 @@ def test_api_simple(client, live_server):
|
||||
url_for("createwatch", recheck_all='1'),
|
||||
headers={'x-api-key': api_key},
|
||||
)
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Did the recheck fire?
|
||||
res = client.get(
|
||||
@@ -266,7 +267,7 @@ def test_api_watch_PUT_update(client, live_server):
|
||||
|
||||
#live_server_setup(live_server)
|
||||
api_key = extract_api_key_from_UI(client)
|
||||
time.sleep(1)
|
||||
|
||||
# Create a watch
|
||||
set_original_response()
|
||||
test_url = url_for('test_endpoint', _external=True,
|
||||
@@ -282,7 +283,6 @@ def test_api_watch_PUT_update(client, live_server):
|
||||
|
||||
assert res.status_code == 201
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
# Get a listing, it will be the first one
|
||||
res = client.get(
|
||||
@@ -297,6 +297,8 @@ def test_api_watch_PUT_update(client, live_server):
|
||||
url_for("edit_page", uuid=watch_uuid),
|
||||
)
|
||||
assert b"cookie: yum" in res.data, "'cookie: yum' found in 'headers' section"
|
||||
assert b"One" in res.data, "Tag 'One' was found"
|
||||
assert b"Two" in res.data, "Tag 'Two' was found"
|
||||
|
||||
# HTTP PUT ( UPDATE an existing watch )
|
||||
res = client.put(
|
||||
@@ -319,7 +321,8 @@ def test_api_watch_PUT_update(client, live_server):
|
||||
)
|
||||
assert b"new title" in res.data, "new title found in edit page"
|
||||
assert b"552" in res.data, "552 minutes found in edit page"
|
||||
assert b"One, Two" in res.data, "Tag 'One, Two' was found"
|
||||
assert b"One" in res.data, "Tag 'One' was found"
|
||||
assert b"Two" in res.data, "Tag 'Two' was found"
|
||||
assert b"cookie: all eaten" in res.data, "'cookie: all eaten' found in 'headers' section"
|
||||
|
||||
######################################################
|
||||
|
||||
@@ -24,7 +24,7 @@ def test_basic_auth(client, live_server):
|
||||
# Check form validation
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"include_filters": "", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, extract_UUID_from_client, extract_api_key_from_UI
|
||||
from .util import live_server_setup, extract_UUID_from_client, extract_api_key_from_UI, wait_for_all_checks
|
||||
|
||||
|
||||
def set_response_with_ldjson():
|
||||
test_return_data = """<html>
|
||||
@@ -27,7 +28,7 @@ def set_response_with_ldjson():
|
||||
"description":"You dont need it",
|
||||
"mpn":"111111",
|
||||
"sku":"22222",
|
||||
"offers":{
|
||||
"Offers":{
|
||||
"@type":"AggregateOffer",
|
||||
"lowPrice":8097000,
|
||||
"highPrice":8099900,
|
||||
@@ -75,12 +76,11 @@ def set_response_without_ldjson():
|
||||
f.write(test_return_data)
|
||||
return None
|
||||
|
||||
# actually only really used by the distll.io importer, but could be handy too
|
||||
def test_check_ldjson_price_autodetect(client, live_server):
|
||||
def test_setup(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
# actually only really used by the distll.io importer, but could be handy too
|
||||
def test_check_ldjson_price_autodetect(client, live_server):
|
||||
|
||||
set_response_with_ldjson()
|
||||
|
||||
@@ -92,7 +92,7 @@ def test_check_ldjson_price_autodetect(client, live_server):
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Should get a notice that it's available
|
||||
res = client.get(url_for("index"))
|
||||
@@ -102,11 +102,11 @@ def test_check_ldjson_price_autodetect(client, live_server):
|
||||
uuid = extract_UUID_from_client(client)
|
||||
|
||||
client.get(url_for('price_data_follower.accept', uuid=uuid, follow_redirects=True))
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
# Offer should be gone
|
||||
res = client.get(url_for("index"))
|
||||
assert b'Embedded price data' not in res.data
|
||||
@@ -138,9 +138,97 @@ def test_check_ldjson_price_autodetect(client, live_server):
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'ldjson-price-track-offer' not in res.data
|
||||
|
||||
##########################################################################################
|
||||
client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
|
||||
|
||||
def _test_runner_check_bad_format_ignored(live_server, client, has_ldjson_price_data):
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
for k,v in client.application.config.get('DATASTORE').data['watching'].items():
|
||||
assert v.get('last_error') == False
|
||||
assert v.get('has_ldjson_price_data') == has_ldjson_price_data
|
||||
|
||||
|
||||
##########################################################################################
|
||||
client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
|
||||
|
||||
def test_bad_ldjson_is_correctly_ignored(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
test_return_data = """
|
||||
<html>
|
||||
<head>
|
||||
<script type="application/ld+json">
|
||||
{
|
||||
"@context": "http://schema.org",
|
||||
"@type": ["Product", "SubType"],
|
||||
"name": "My test product",
|
||||
"description": "",
|
||||
"offers": {
|
||||
"note" : "You can see the case-insensitive OffERS key, it should work",
|
||||
"@type": "Offer",
|
||||
"offeredBy": {
|
||||
"@type": "Organization",
|
||||
"name":"Person",
|
||||
"telephone":"+1 999 999 999"
|
||||
},
|
||||
"price": "1",
|
||||
"priceCurrency": "EUR",
|
||||
"url": "/some/url"
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="yes">Some extra stuff</div>
|
||||
</body></html>
|
||||
"""
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
_test_runner_check_bad_format_ignored(live_server=live_server, client=client, has_ldjson_price_data=True)
|
||||
test_return_data = """
|
||||
<html>
|
||||
<head>
|
||||
<script type="application/ld+json">
|
||||
{
|
||||
"@context": "http://schema.org",
|
||||
"@type": ["Product", "SubType"],
|
||||
"name": "My test product",
|
||||
"description": "",
|
||||
"BrokenOffers": {
|
||||
"@type": "Offer",
|
||||
"offeredBy": {
|
||||
"@type": "Organization",
|
||||
"name":"Person",
|
||||
"telephone":"+1 999 999 999"
|
||||
},
|
||||
"price": "1",
|
||||
"priceCurrency": "EUR",
|
||||
"url": "/some/url"
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="yes">Some extra stuff</div>
|
||||
</body></html>
|
||||
"""
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
_test_runner_check_bad_format_ignored(live_server=live_server, client=client, has_ldjson_price_data=False)
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
from .util import set_original_response, set_modified_response, live_server_setup
|
||||
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks
|
||||
from flask import url_for
|
||||
from urllib.request import urlopen
|
||||
from zipfile import ZipFile
|
||||
@@ -19,12 +19,12 @@ def test_backup(client, live_server):
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": url_for('test_endpoint', _external=True)},
|
||||
data={"urls": url_for('test_endpoint', _external=True)+"?somechar=őőőőőőőő"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("get_backup"),
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import live_server_setup
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from changedetectionio import html_tools
|
||||
|
||||
def set_original_ignore_response():
|
||||
@@ -61,7 +61,7 @@ def set_modified_response_minus_block_text():
|
||||
|
||||
|
||||
def test_check_block_changedetection_text_NOT_present(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
live_server_setup(live_server)
|
||||
# Use a mix of case in ZzZ to prove it works case-insensitive.
|
||||
ignore_text = "out of stoCk\r\nfoobar"
|
||||
@@ -81,7 +81,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
@@ -96,7 +96,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
# Check it saved
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first"),
|
||||
@@ -107,7 +107,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
@@ -120,7 +120,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
@@ -131,7 +131,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
|
||||
# Now we set a change where the text is gone, it should now trigger
|
||||
set_modified_response_minus_block_text()
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import live_server_setup
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
|
||||
from ..html_tools import *
|
||||
|
||||
@@ -96,7 +96,7 @@ def test_check_markup_include_filters_restriction(client, live_server):
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"include_filters": include_filters, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
@@ -157,7 +157,7 @@ def test_check_multiple_filters(client, live_server):
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"include_filters": include_filters,
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
@@ -176,3 +176,77 @@ def test_check_multiple_filters(client, live_server):
|
||||
assert b"Blob A" in res.data # CSS was ok
|
||||
assert b"Blob B" in res.data # xPath was ok
|
||||
assert b"Blob C" not in res.data # Should not be included
|
||||
|
||||
# The filter exists, but did not contain anything useful
|
||||
# Mainly used when the filter contains just an IMG, this can happen when someone selects an image in the visual-selector
|
||||
# Tests fetcher can throw a "ReplyWithContentButNoText" exception after applying filter and extracting text
|
||||
def test_filter_is_empty_help_suggestion(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
|
||||
include_filters = "#blob-a"
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write("""<html><body>
|
||||
<div id="blob-a">
|
||||
<img src="something.jpg">
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
""")
|
||||
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"include_filters": include_filters,
|
||||
"url": test_url,
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
|
||||
res = client.get(
|
||||
url_for("index"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b'empty result or contain only an image' in res.data
|
||||
|
||||
|
||||
### Just an empty selector, no image
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write("""<html><body>
|
||||
<div id="blob-a">
|
||||
<!-- doo doo -->
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
""")
|
||||
|
||||
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("index"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b'empty result or contain only an image' not in res.data
|
||||
assert b'but contained no usable text' in res.data
|
||||
|
||||
@@ -129,7 +129,7 @@ def test_element_removal_full(client, live_server):
|
||||
data={
|
||||
"subtractive_selectors": subtractive_selectors_data,
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests",
|
||||
},
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
|
||||
from ..html_tools import *
|
||||
|
||||
@@ -55,6 +55,8 @@ def set_multiline_response():
|
||||
</p>
|
||||
|
||||
<div>aaand something lines</div>
|
||||
<br>
|
||||
<div>and this should be</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
@@ -66,11 +68,10 @@ def set_multiline_response():
|
||||
|
||||
|
||||
def test_setup(client, live_server):
|
||||
|
||||
live_server_setup(live_server)
|
||||
|
||||
def test_check_filter_multiline(client, live_server):
|
||||
|
||||
#live_server_setup(live_server)
|
||||
set_multiline_response()
|
||||
|
||||
# Add our URL to the import page
|
||||
@@ -82,16 +83,17 @@ def test_check_filter_multiline(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"include_filters": '',
|
||||
'extract_text': '/something.+?6 billion.+?lines/si',
|
||||
# Test a regex and a plaintext
|
||||
'extract_text': '/something.+?6 billion.+?lines/si\r\nand this should be',
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
'fetch_backend': "html_requests"
|
||||
},
|
||||
@@ -99,13 +101,19 @@ def test_check_filter_multiline(client, live_server):
|
||||
)
|
||||
|
||||
assert b"Updated watch." in res.data
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
|
||||
# Issue 1828
|
||||
assert b'not at the start of the expression' not in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Plaintext that doesnt look like a regex should match also
|
||||
assert b'and this should be' in res.data
|
||||
|
||||
assert b'<div class="">Something' in res.data
|
||||
assert b'<div class="">across 6 billion multiple' in res.data
|
||||
@@ -115,14 +123,11 @@ def test_check_filter_multiline(client, live_server):
|
||||
assert b'aaand something lines' not in res.data
|
||||
|
||||
def test_check_filter_and_regex_extract(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
include_filters = ".changetext"
|
||||
|
||||
set_original_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
@@ -132,21 +137,17 @@ def test_check_filter_and_regex_extract(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(1)
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"include_filters": include_filters,
|
||||
'extract_text': '\d+ online\r\n\d+ guests\r\n/somecase insensitive \d+/i\r\n/somecase insensitive (345\d)/i',
|
||||
'extract_text': '/\d+ online/\r\n/\d+ guests/\r\n/somecase insensitive \d+/i\r\n/somecase insensitive (345\d)/i\r\n/issue1828.+?2022/i',
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
'fetch_backend': "html_requests"
|
||||
},
|
||||
@@ -155,8 +156,13 @@ def test_check_filter_and_regex_extract(client, live_server):
|
||||
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
#issue 1828
|
||||
assert b'not at the start of the expression' not in res.data
|
||||
|
||||
# Make a change
|
||||
set_modified_response()
|
||||
@@ -164,7 +170,7 @@ def test_check_filter_and_regex_extract(client, live_server):
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
# Because it should be looking at only that 'sametext' id
|
||||
|
||||
@@ -15,7 +15,7 @@ def set_response_without_filter():
|
||||
<p>Which is across multiple lines</p>
|
||||
<br>
|
||||
So let's see what happens. <br>
|
||||
<div id="nope-doesnt-exist">Some text thats the same</div>
|
||||
<div id="nope-doesnt-exist">Some text thats the same</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
@@ -32,7 +32,7 @@ def set_response_with_filter():
|
||||
<p>Which is across multiple lines</p>
|
||||
<br>
|
||||
So let's see what happens. <br>
|
||||
<div class="ticket-available">Ticket now on sale!</div>
|
||||
<div class="ticket-available">Ticket now on sale!</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
@@ -56,7 +56,7 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tag": 'cinema'},
|
||||
data={"url": test_url, "tags": 'cinema'},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Watch added" in res.data
|
||||
@@ -84,12 +84,13 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se
|
||||
"Snapshot: {{current_snapshot}}\n"
|
||||
"Diff: {{diff}}\n"
|
||||
"Diff Full: {{diff_full}}\n"
|
||||
"Diff as Patch: {{diff_patch}}\n"
|
||||
":-)",
|
||||
"notification_format": "Text"}
|
||||
|
||||
notification_form_data.update({
|
||||
"url": test_url,
|
||||
"tag": "my tag",
|
||||
"tags": "my tag",
|
||||
"title": "my title",
|
||||
"headers": "",
|
||||
"include_filters": '.ticket-available',
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import os
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import set_original_response, live_server_setup, extract_UUID_from_client
|
||||
from .util import set_original_response, live_server_setup, extract_UUID_from_client, wait_for_all_checks
|
||||
from changedetectionio.model import App
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ def set_response_with_filter():
|
||||
<p>Which is across multiple lines</p>
|
||||
<br>
|
||||
So let's see what happens. <br>
|
||||
<div id="nope-doesnt-exist">Some text thats the same</div>
|
||||
<div id="nope-doesnt-exist">Some text thats the same</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
@@ -37,14 +37,14 @@ def run_filter_test(client, content_filter):
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tag": ''},
|
||||
data={"url": test_url, "tags": ''},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
# Give the thread time to pick up the first version
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
@@ -66,13 +66,14 @@ def run_filter_test(client, content_filter):
|
||||
"Snapshot: {{current_snapshot}}\n"
|
||||
"Diff: {{diff}}\n"
|
||||
"Diff Full: {{diff_full}}\n"
|
||||
"Diff as Patch: {{diff_patch}}\n"
|
||||
":-)",
|
||||
"notification_format": "Text"}
|
||||
|
||||
notification_form_data.update({
|
||||
"url": test_url,
|
||||
"tag": "my tag",
|
||||
"title": "my title",
|
||||
"tags": "my tag",
|
||||
"title": "my title 123",
|
||||
"headers": "",
|
||||
"filter_failure_notification_send": 'y',
|
||||
"include_filters": content_filter,
|
||||
@@ -85,43 +86,55 @@ def run_filter_test(client, content_filter):
|
||||
)
|
||||
|
||||
assert b"Updated watch." in res.data
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Now the notification should not exist, because we didnt reach the threshold
|
||||
assert not os.path.isfile("test-datastore/notification.txt")
|
||||
|
||||
for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT):
|
||||
# -2 because we would have checked twice above (on adding and on edit)
|
||||
for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT-2):
|
||||
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
assert not os.path.isfile("test-datastore/notification.txt"), f"test-datastore/notification.txt should not exist - Attempt {i}"
|
||||
|
||||
# We should see something in the frontend
|
||||
assert b'Warning, no filters were found' in res.data
|
||||
|
||||
# One more check should trigger it (see -2 above)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
# Now it should exist and contain our "filter not found" alert
|
||||
assert os.path.isfile("test-datastore/notification.txt")
|
||||
notification = False
|
||||
|
||||
with open("test-datastore/notification.txt", 'r') as f:
|
||||
notification = f.read()
|
||||
|
||||
assert 'CSS/xPath filter was not present in the page' in notification
|
||||
assert content_filter.replace('"', '\\"') in notification
|
||||
|
||||
# Remove it and prove that it doesnt trigger when not expected
|
||||
# Remove it and prove that it doesn't trigger when not expected
|
||||
# It should register a change, but no 'filter not found'
|
||||
os.unlink("test-datastore/notification.txt")
|
||||
set_response_with_filter()
|
||||
|
||||
# Try several times, it should NOT have 'filter not found'
|
||||
for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT):
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have sent a notification, but..
|
||||
assert os.path.isfile("test-datastore/notification.txt")
|
||||
# but it should not contain the info about the failed filter
|
||||
# but it should not contain the info about a failed filter (because there was none in this case)
|
||||
with open("test-datastore/notification.txt", 'r') as f:
|
||||
notification = f.read()
|
||||
assert not 'CSS/xPath filter was not present in the page' in notification
|
||||
|
||||
# Re #1247 - All tokens got replaced
|
||||
# Re #1247 - All tokens got replaced correctly in the notification
|
||||
res = client.get(url_for("index"))
|
||||
uuid = extract_UUID_from_client(client)
|
||||
# UUID is correct, but notification contains tag uuid as UUIID wtf
|
||||
assert uuid in notification
|
||||
|
||||
# cleanup for the next
|
||||
@@ -137,7 +150,7 @@ def test_setup(live_server):
|
||||
|
||||
def test_check_include_filters_failure_notification(client, live_server):
|
||||
set_original_response()
|
||||
time.sleep(1)
|
||||
wait_for_all_checks(client)
|
||||
run_filter_test(client, '#nope-doesnt-exist')
|
||||
|
||||
def test_check_xpath_filter_failure_notification(client, live_server):
|
||||
|
||||
changedetectionio/tests/test_group.py (new file, 323 lines)
@@ -0,0 +1,323 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, get_UUID_for_tag_name, extract_UUID_from_client
|
||||
import os
|
||||
|
||||
|
||||
def test_setup(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
def set_original_response():
|
||||
test_return_data = """<html>
|
||||
<body>
|
||||
Some initial text<br>
|
||||
<p id="only-this">Should be only this</p>
|
||||
<br>
|
||||
<p id="not-this">And never this</p>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
return None
|
||||
|
||||
def set_modified_response():
|
||||
test_return_data = """<html>
|
||||
<body>
|
||||
Some initial text<br>
|
||||
<p id="only-this">Should be REALLY only this</p>
|
||||
<br>
|
||||
<p id="not-this">And never this</p>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
return None
|
||||
|
||||
def test_setup_group_tag(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
set_original_response()
|
||||
|
||||
# Add a tag with some config, import a tag and it should roughly work
|
||||
res = client.post(
|
||||
url_for("tags.form_tag_add"),
|
||||
data={"name": "test-tag"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Tag added" in res.data
|
||||
assert b"test-tag" in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("tags.form_tag_edit_submit", uuid="first"),
|
||||
data={"name": "test-tag",
|
||||
"include_filters": '#only-this',
|
||||
"subtractive_selectors": '#not-this'},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated" in res.data
|
||||
tag_uuid = get_UUID_for_tag_name(client, name="test-tag")
|
||||
res = client.get(
|
||||
url_for("tags.form_tag_edit", uuid="first")
|
||||
)
|
||||
assert b"#only-this" in res.data
|
||||
assert b"#not-this" in res.data
|
||||
|
||||
# Tag should be setup and ready, now add a watch
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url + "?first-imported=1 test-tag, extra-import-tag"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'import-tag' in res.data
|
||||
assert b'extra-import-tag' in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("tags.tags_overview_page"),
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b'import-tag' in res.data
|
||||
assert b'extra-import-tag' in res.data
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'Warning, no filters were found' not in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b'Should be only this' in res.data
|
||||
assert b'And never this' not in res.data
|
||||
|
||||
|
||||
# RSS Group tag filter
|
||||
# An extra one that should be excluded
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url + "?should-be-excluded=1 some-tag"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
wait_for_all_checks(client)
|
||||
set_modified_response()
|
||||
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
wait_for_all_checks(client)
|
||||
rss_token = extract_rss_token_from_UI(client)
|
||||
res = client.get(
|
||||
url_for("rss", token=rss_token, tag="extra-import-tag", _external=True),
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"should-be-excluded" not in res.data
|
||||
assert res.status_code == 200
|
||||
assert b"first-imported=1" in res.data
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
def test_tag_import_singular(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url + " test-tag, test-tag\r\n"+ test_url + "?x=1 test-tag, test-tag\r\n"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"2 Imported" in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("tags.tags_overview_page"),
|
||||
follow_redirects=True
|
||||
)
|
||||
# Should be only 1 tag because they both had the same
|
||||
assert res.data.count(b'test-tag') == 1
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
def test_tag_add_in_ui(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
#
|
||||
res = client.post(
|
||||
url_for("tags.form_tag_add"),
|
||||
data={"name": "new-test-tag"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Tag added" in res.data
|
||||
assert b"new-test-tag" in res.data
|
||||
|
||||
res = client.get(url_for("tags.delete_all"), follow_redirects=True)
|
||||
assert b'All tags deleted' in res.data
|
||||
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
def test_group_tag_notification(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
set_original_response()
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tags": 'test-tag, other-tag'},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json')
|
||||
notification_form_data = {"notification_urls": notification_url,
|
||||
"notification_title": "New GROUP TAG ChangeDetection.io Notification - {{watch_url}}",
|
||||
"notification_body": "BASE URL: {{base_url}}\n"
|
||||
"Watch URL: {{watch_url}}\n"
|
||||
"Watch UUID: {{watch_uuid}}\n"
|
||||
"Watch title: {{watch_title}}\n"
|
||||
"Watch tag: {{watch_tag}}\n"
|
||||
"Preview: {{preview_url}}\n"
|
||||
"Diff URL: {{diff_url}}\n"
|
||||
"Snapshot: {{current_snapshot}}\n"
|
||||
"Diff: {{diff}}\n"
|
||||
"Diff Added: {{diff_added}}\n"
|
||||
"Diff Removed: {{diff_removed}}\n"
|
||||
"Diff Full: {{diff_full}}\n"
|
||||
"Diff as Patch: {{diff_patch}}\n"
|
||||
":-)",
|
||||
"notification_screenshot": True,
|
||||
"notification_format": "Text",
|
||||
"title": "test-tag"}
|
||||
|
||||
res = client.post(
|
||||
url_for("tags.form_tag_edit_submit", uuid=get_UUID_for_tag_name(client, name="test-tag")),
|
||||
data=notification_form_data,
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated" in res.data
|
||||
|
||||
wait_for_all_checks(client)
|
||||
|
||||
set_modified_response()
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(3)
|
||||
|
||||
assert os.path.isfile("test-datastore/notification.txt")
|
||||
|
||||
# Verify what was sent as a notification, this file should exist
|
||||
with open("test-datastore/notification.txt", "r") as f:
|
||||
notification_submission = f.read()
|
||||
os.unlink("test-datastore/notification.txt")
|
||||
|
||||
# Did we see the URL that had a change, in the notification?
|
||||
# Diff was correctly executed
|
||||
assert test_url in notification_submission
|
||||
assert ':-)' in notification_submission
|
||||
assert "Diff Full: Some initial text" in notification_submission
|
||||
assert "New GROUP TAG ChangeDetection.io" in notification_submission
|
||||
assert "test-tag" in notification_submission
|
||||
assert "other-tag" in notification_submission
|
||||
|
||||
#@todo Test that multiple notifications fired
|
||||
#@todo Test that each of multiple notifications with different settings
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
def test_limit_tag_ui(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
urls=[]
|
||||
|
||||
for i in range(20):
|
||||
urls.append(test_url+"?x="+str(i)+" test-tag")
|
||||
|
||||
for i in range(20):
|
||||
urls.append(test_url+"?non-grouped="+str(i))
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": "\r\n".join(urls)},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"40 Imported" in res.data
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'test-tag' in res.data
|
||||
|
||||
# All should be here
|
||||
assert res.data.count(b'processor-text_json_diff') == 40
|
||||
|
||||
tag_uuid = get_UUID_for_tag_name(client, name="test-tag")
|
||||
|
||||
res = client.get(url_for("index", tag=tag_uuid))
|
||||
|
||||
# Just a subset should be here
|
||||
assert b'test-tag' in res.data
|
||||
assert res.data.count(b'processor-text_json_diff') == 20
|
||||
assert b"object at" not in res.data
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
res = client.get(url_for("tags.delete_all"), follow_redirects=True)
|
||||
assert b'All tags deleted' in res.data
|
||||
def test_clone_tag_on_import(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url + " test-tag, another-tag\r\n"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'test-tag' in res.data
|
||||
assert b'another-tag' in res.data
|
||||
|
||||
watch_uuid = extract_UUID_from_client(client)
|
||||
res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True)
|
||||
|
||||
assert b'Cloned' in res.data
|
||||
# 2 times plus the top link to tag
|
||||
assert res.data.count(b'test-tag') == 3
|
||||
assert res.data.count(b'another-tag') == 3
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
def test_clone_tag_on_quickwatchform_add(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tags": ' test-tag, another-tag '},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'test-tag' in res.data
|
||||
assert b'another-tag' in res.data
|
||||
|
||||
watch_uuid = extract_UUID_from_client(client)
|
||||
res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True)
|
||||
|
||||
assert b'Cloned' in res.data
|
||||
# 2 times plus the top link to tag
|
||||
assert res.data.count(b'test-tag') == 3
|
||||
assert res.data.count(b'another-tag') == 3
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
res = client.get(url_for("tags.delete_all"), follow_redirects=True)
|
||||
assert b'All tags deleted' in res.data
|
||||
@@ -15,11 +15,24 @@ def test_strip_regex_text_func():
but sometimes we want to remove the lines.

but 1 lines
skip 5 lines
really? yes man
#/not this tries weirdly formed regex or just strings starting with /
/not this
but including 1234 lines
igNORe-cAse text we dont want to keep
but not always."""

ignore_lines = ["sometimes", "/\s\d{2,3}\s/", "/ignore-case text/"]

ignore_lines = [
"sometimes",
"/\s\d{2,3}\s/",
"/ignore-case text/",
"really?",
"/skip \d lines/i",
"/not"
]


fetcher = fetch_site_status.perform_site_check(datastore=False)
stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines)
@@ -27,4 +40,10 @@ def test_strip_regex_text_func():
assert b"but 1 lines" in stripped_content
assert b"igNORe-cAse text" not in stripped_content
assert b"but 1234 lines" not in stripped_content
assert b"really" not in stripped_content
assert b"not this" not in stripped_content

# Check line number reporting
stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines, mode="line numbers")
assert stripped_content == [2, 5, 6, 7, 8, 10]
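
For reference, the extended ignore_lines above mix plain substrings with /regex/ rules (optionally /i for case-insensitive), and the new mode="line numbers" call returns which lines were ignored (1-based, per the assertion above) instead of the stripped text. A small illustrative call, using made-up input content; only the function name, argument order and the mode keyword are taken from the test itself:

from changedetectionio import html_tools

# Hypothetical sample content, just to show the call shape
content = "first line\nsometimes this gets ignored\nskip 5 lines\nlast line"
ignore = ["sometimes", r"/skip \d lines/i"]

# Default mode: the content with the ignored lines stripped out
print(html_tools.strip_ignore_text(content, ignore))

# "line numbers" mode: which lines matched the ignore rules
print(html_tools.strip_ignore_text(content, ignore, mode="line numbers"))
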
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import live_server_setup
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
from changedetectionio import html_tools
|
||||
|
||||
def test_setup(live_server):
|
||||
@@ -84,7 +84,6 @@ def set_modified_ignore_response():
|
||||
|
||||
|
||||
def test_check_ignore_text_functionality(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
# Use a mix of case in ZzZ to prove it works case-insensitive.
|
||||
ignore_text = "XXXXX\r\nYYYYY\r\nzZzZZ\r\nnew ignore stuff"
|
||||
@@ -103,7 +102,7 @@ def test_check_ignore_text_functionality(client, live_server):
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
@@ -124,7 +123,7 @@ def test_check_ignore_text_functionality(client, live_server):
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
@@ -137,7 +136,7 @@ def test_check_ignore_text_functionality(client, live_server):
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
@@ -151,7 +150,7 @@ def test_check_ignore_text_functionality(client, live_server):
|
||||
# Just to be sure.. set a regular modified change..
|
||||
set_modified_original_ignore_response()
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
@@ -167,7 +166,6 @@ def test_check_ignore_text_functionality(client, live_server):
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
def test_check_global_ignore_text_functionality(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
@@ -198,7 +196,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
|
||||
# Goto the edit page of the item, add our ignore text
|
||||
@@ -220,7 +218,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# so that we are sure everything is viewed and in a known 'nothing changed' state
|
||||
res = client.get(url_for("diff_history_page", uuid="first"))
|
||||
@@ -237,7 +235,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
@@ -247,7 +245,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
|
||||
# Just to be sure.. set a regular modified change that will trigger it
|
||||
set_modified_original_ignore_response()
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
|
||||
changedetectionio/tests/test_ignorehighlighter.py (new file, 57 lines)
@@ -0,0 +1,57 @@
#!/usr/bin/python3

import time
from flask import url_for
from .util import live_server_setup, wait_for_all_checks
from changedetectionio import html_tools
from . util import extract_UUID_from_client

def set_original_ignore_response():
test_return_data = """<html>
<body>
Some initial text<br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
<p>oh yeah 456</p>
</body>
</html>

"""

with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)


def test_highlight_ignore(client, live_server):
live_server_setup(live_server)
set_original_ignore_response()
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data

# Give the thread time to pick it up
wait_for_all_checks(client)
uuid = extract_UUID_from_client(client)
# use the highlighter endpoint
res = client.post(
url_for("highlight_submit_ignore_url", uuid=uuid),
data={"mode": 'digit-regex', 'selection': 'oh yeah 123'},
follow_redirects=True
)

res = client.get(url_for("edit_page", uuid=uuid))

# should be a regex now
assert b'/oh\ yeah\ \d+/' in res.data

# Should return a link
assert b'href' in res.data

# And it should register in the preview page
res = client.get(url_for("preview_page", uuid=uuid))
assert b'<div class="ignored">oh yeah 456' in res.data
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import live_server_setup
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
|
||||
|
||||
def test_setup(live_server):
|
||||
@@ -40,7 +40,7 @@ def set_some_changed_response():
|
||||
|
||||
|
||||
def test_normal_page_check_works_with_ignore_status_code(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
@@ -68,15 +68,15 @@ def test_normal_page_check_works_with_ignore_status_code(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
set_some_changed_response()
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
@@ -109,13 +109,13 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server):
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"ignore_status_codes": "y", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
data={"ignore_status_codes": "y", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Make a change
|
||||
set_some_changed_response()
|
||||
@@ -123,7 +123,7 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server):
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
# Because it should be looking at only that 'sametext' id
|
||||
|
||||
@@ -112,6 +112,7 @@ def test_import_distillio(client, live_server):
|
||||
# did the tags work?
|
||||
res = client.get( url_for("index"))
|
||||
|
||||
# check tags
|
||||
assert b"nice stuff" in res.data
|
||||
assert b"nerd-news" in res.data
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ def test_jinja2_in_url_query(client, live_server):
|
||||
"date={% now 'Europe/Berlin', '%Y' %}.{% now 'Europe/Berlin', '%m' %}.{% now 'Europe/Berlin', '%d' %}", )
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": full_url, "tag": "test"},
|
||||
data={"url": full_url, "tags": "test"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
@@ -208,7 +208,7 @@ def test_check_json_without_filter(client, live_server):
|
||||
)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
@@ -238,7 +238,7 @@ def check_json_filter(json_filter, client, live_server):
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
@@ -246,7 +246,7 @@ def check_json_filter(json_filter, client, live_server):
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"include_filters": json_filter,
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests"
|
||||
},
|
||||
@@ -261,14 +261,14 @@ def check_json_filter(json_filter, client, live_server):
|
||||
assert bytes(escape(json_filter).encode('utf-8')) in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
# Make a change
|
||||
set_modified_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(4)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
res = client.get(url_for("index"))
|
||||
@@ -306,14 +306,14 @@ def check_json_filter_bool_val(json_filter, client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"include_filters": json_filter,
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests"
|
||||
},
|
||||
@@ -322,14 +322,14 @@ def check_json_filter_bool_val(json_filter, client, live_server):
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
# Make a change
|
||||
set_modified_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("diff_history_page", uuid="first"))
|
||||
# But the change should be there, tho its hard to test the change was detected because it will show old and new versions
|
||||
@@ -366,7 +366,7 @@ def check_json_ext_filter(json_filter, client, live_server):
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
@@ -374,7 +374,7 @@ def check_json_ext_filter(json_filter, client, live_server):
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"include_filters": json_filter,
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests"
|
||||
},
|
||||
@@ -389,14 +389,14 @@ def check_json_ext_filter(json_filter, client, live_server):
|
||||
assert bytes(escape(json_filter).encode('utf-8')) in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
# Make a change
|
||||
set_modified_ext_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(4)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# It should have 'unviewed'
|
||||
res = client.get(url_for("index"))
|
||||
@@ -428,14 +428,14 @@ def test_ignore_json_order(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write('{"world" : 123, "hello": 123}')
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' not in res.data
|
||||
@@ -446,7 +446,7 @@ def test_ignore_json_order(client, live_server):
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
@@ -3,7 +3,8 @@ import os
|
||||
import time
|
||||
import re
|
||||
from flask import url_for
|
||||
from . util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup
|
||||
from .util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup, wait_for_all_checks, \
|
||||
set_longer_modified_response
|
||||
from . util import extract_UUID_from_client
|
||||
import logging
|
||||
import base64
|
||||
@@ -21,11 +22,9 @@ def test_setup(live_server):
|
||||
# Hard to just add more live server URLs when one test is already running (I think)
|
||||
# So we add our test here (was in a different file)
|
||||
def test_check_notification(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
set_original_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(3)
|
||||
|
||||
# Re 360 - new install should have defaults set
|
||||
res = client.get(url_for("settings_page"))
|
||||
notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json')
|
||||
@@ -62,13 +61,13 @@ def test_check_notification(client, live_server):
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tag": ''},
|
||||
data={"url": test_url, "tags": ''},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
# Give the thread time to pick up the first version
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# We write the PNG to disk, but a JPEG should appear in the notification
|
||||
# Write the last screenshot png
|
||||
@@ -99,13 +98,14 @@ def test_check_notification(client, live_server):
|
||||
"Diff Added: {{diff_added}}\n"
|
||||
"Diff Removed: {{diff_removed}}\n"
|
||||
"Diff Full: {{diff_full}}\n"
|
||||
"Diff as Patch: {{diff_patch}}\n"
|
||||
":-)",
|
||||
"notification_screenshot": True,
|
||||
"notification_format": "Text"}
|
||||
|
||||
notification_form_data.update({
|
||||
"url": test_url,
|
||||
"tag": "my tag",
|
||||
"tags": "my tag, my second tag",
|
||||
"title": "my title",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests"})
|
||||
@@ -128,7 +128,7 @@ def test_check_notification(client, live_server):
|
||||
|
||||
|
||||
## Now recheck, and it should have sent the notification
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
set_modified_response()
|
||||
|
||||
# Trigger a check
|
||||
@@ -141,8 +141,7 @@ def test_check_notification(client, live_server):
|
||||
|
||||
# Did we see the URL that had a change, in the notification?
|
||||
# Diff was correctly executed
|
||||
assert test_url in notification_submission
|
||||
assert ':-)' in notification_submission
|
||||
|
||||
assert "Diff Full: Some initial text" in notification_submission
|
||||
assert "Diff: (changed) Which is across multiple lines" in notification_submission
|
||||
assert "(into) which has this one new line" in notification_submission
|
||||
@@ -150,12 +149,13 @@ def test_check_notification(client, live_server):
|
||||
assert "b'" not in notification_submission
|
||||
assert re.search('Watch UUID: [0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}', notification_submission, re.IGNORECASE)
|
||||
assert "Watch title: my title" in notification_submission
|
||||
assert "Watch tag: my tag" in notification_submission
|
||||
assert "Watch tag: my tag, my second tag" in notification_submission
|
||||
assert "diff/" in notification_submission
|
||||
assert "preview/" in notification_submission
|
||||
assert ":-)" in notification_submission
|
||||
assert "New ChangeDetection.io Notification - {}".format(test_url) in notification_submission
|
||||
|
||||
assert test_url in notification_submission
|
||||
assert ':-)' in notification_submission
|
||||
# Check the attachment was added, and that it is a JPEG from the original PNG
|
||||
notification_submission_object = json.loads(notification_submission)
|
||||
# We keep PNG screenshots for now
|
||||
@@ -193,11 +193,11 @@ def test_check_notification(client, live_server):
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(1)
|
||||
wait_for_all_checks(client)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(1)
|
||||
wait_for_all_checks(client)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(1)
|
||||
wait_for_all_checks(client)
|
||||
assert os.path.exists("test-datastore/notification.txt") == False
|
||||
|
||||
res = client.get(url_for("notification_logs"))
|
||||
@@ -209,7 +209,7 @@ def test_check_notification(client, live_server):
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"tag": "my tag",
|
||||
"tags": "my tag",
|
||||
"title": "my title",
|
||||
"notification_urls": '',
|
||||
"notification_title": '',
|
||||
@@ -243,7 +243,7 @@ def test_notification_validation(client, live_server):
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tag": 'nice one'},
|
||||
data={"url": test_url, "tags": 'nice one'},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -274,7 +274,7 @@ def test_notification_validation(client, live_server):
|
||||
|
||||
|
||||
def test_notification_custom_endpoint_and_jinja2(client, live_server):
|
||||
time.sleep(1)
|
||||
#live_server_setup(live_server)
|
||||
|
||||
# test_endpoint - that sends the contents of a file
|
||||
# test_notification_endpoint - that takes a POST and writes it to file (test-datastore/notification.txt)
|
||||
@@ -285,12 +285,14 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
|
||||
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={"application-notification_title": "New ChangeDetection.io Notification - {{ watch_url }}",
|
||||
"application-notification_body": '{ "url" : "{{ watch_url }}", "secret": 444 }',
|
||||
# https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
|
||||
"application-notification_urls": test_notification_url,
|
||||
data={
|
||||
"application-fetch_backend": "html_requests",
|
||||
"application-minutes_between_check": 180,
|
||||
"application-fetch_backend": "html_requests"
|
||||
"application-notification_body": '{ "url" : "{{ watch_url }}", "secret": 444 }',
|
||||
"application-notification_format": default_notification_format,
|
||||
"application-notification_urls": test_notification_url,
|
||||
# https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
|
||||
"application-notification_title": "New ChangeDetection.io Notification - {{ watch_url }}",
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
@@ -303,21 +305,20 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tag": 'nice one'},
|
||||
data={"url": test_url, "tags": 'nice one'},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
set_modified_response()
|
||||
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(2)
|
||||
|
||||
|
||||
with open("test-datastore/notification.txt", 'r') as f:
|
||||
x=f.read()
|
||||
x = f.read()
|
||||
j = json.loads(x)
|
||||
assert j['url'].startswith('http://localhost')
|
||||
assert j['secret'] == 444
|
||||
@@ -328,5 +329,9 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server):
|
||||
notification_url = f.read()
|
||||
assert 'xxx=http' in notification_url
|
||||
|
||||
os.unlink("test-datastore/notification-url.txt")
|
||||
# Should always be automatically detected as JSON content type even when we set it as 'Text' (default)
|
||||
assert os.path.isfile("test-datastore/notification-content-type.txt")
|
||||
with open("test-datastore/notification-content-type.txt", 'r') as f:
|
||||
assert 'application/json' in f.read()
|
||||
|
||||
os.unlink("test-datastore/notification-url.txt")
|
||||
|
||||
@@ -17,7 +17,7 @@ def test_check_notification_error_handling(client, live_server):
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": test_url, "tag": ''},
|
||||
data={"url": test_url, "tags": ''},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Watch added" in res.data
|
||||
@@ -32,7 +32,7 @@ def test_check_notification_error_handling(client, live_server):
|
||||
"notification_body": "xxxxx",
|
||||
"notification_format": "Text",
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"title": "",
|
||||
"headers": "",
|
||||
"time_between_check-minutes": "180",
|
||||
|
||||
@@ -25,7 +25,7 @@ def test_headers_in_request(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(1)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
@@ -43,7 +43,7 @@ def test_headers_in_request(client, live_server):
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
|
||||
"headers": "xxx:ooo\ncool:yeah\r\ncookie:"+cookie_header},
|
||||
follow_redirects=True
|
||||
@@ -95,14 +95,14 @@ def test_body_in_request(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# add the first 'version'
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"method": "POST",
|
||||
"fetch_backend": "html_requests",
|
||||
"body": "something something"},
|
||||
@@ -110,7 +110,7 @@ def test_body_in_request(client, live_server):
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Now the change which should trigger a change
|
||||
body_value = 'Test Body Value'
|
||||
@@ -118,7 +118,7 @@ def test_body_in_request(client, live_server):
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"method": "POST",
|
||||
"fetch_backend": "html_requests",
|
||||
"body": body_value},
|
||||
@@ -126,7 +126,7 @@ def test_body_in_request(client, live_server):
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
time.sleep(3)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# The service should echo back the body
|
||||
res = client.get(
|
||||
@@ -163,7 +163,7 @@ def test_body_in_request(client, live_server):
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"method": "GET",
|
||||
"fetch_backend": "html_requests",
|
||||
"body": "invalid"},
|
||||
@@ -187,7 +187,7 @@ def test_method_in_request(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
@@ -195,14 +195,14 @@ def test_method_in_request(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Attempt to add a method which is not valid
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"fetch_backend": "html_requests",
|
||||
"method": "invalid"},
|
||||
follow_redirects=True
|
||||
@@ -214,7 +214,7 @@ def test_method_in_request(client, live_server):
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"fetch_backend": "html_requests",
|
||||
"method": "PATCH"},
|
||||
follow_redirects=True
|
||||
@@ -222,7 +222,7 @@ def test_method_in_request(client, live_server):
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
# Give the thread time to pick up the first version
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# The service should echo back the request verb
|
||||
res = client.get(
|
||||
@@ -233,7 +233,7 @@ def test_method_in_request(client, live_server):
|
||||
# The test call service will return the verb as the body
|
||||
assert b"PATCH" in res.data
|
||||
|
||||
time.sleep(2)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
watches_with_method = 0
|
||||
with open('test-datastore/url-watches.json') as f:
|
||||
@@ -265,7 +265,7 @@ def test_headers_textfile_in_request(client, live_server):
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(1)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
|
||||
# Add some headers to a request
|
||||
@@ -273,7 +273,7 @@ def test_headers_textfile_in_request(client, live_server):
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"tag": "testtag",
|
||||
"tags": "testtag",
|
||||
"fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests',
|
||||
"headers": "xxx:ooo\ncool:yeah\r\n"},
|
||||
follow_redirects=True
|
||||
|
||||
changedetectionio/tests/test_search.py (new file, 74 lines)
@@ -0,0 +1,74 @@
|
||||
from flask import url_for
|
||||
from .util import set_original_response, set_modified_response, live_server_setup
|
||||
import time
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
def test_basic_search(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
|
||||
urls = ['https://localhost:12300?first-result=1',
|
||||
'https://localhost:5000?second-result=1'
|
||||
]
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": "\r\n".join(urls)},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"2 Imported" in res.data
|
||||
|
||||
# By URL
|
||||
res = client.get(url_for("index") + "?q=first-res")
|
||||
assert urls[0].encode('utf-8') in res.data
|
||||
assert urls[1].encode('utf-8') not in res.data
|
||||
|
||||
# By Title
|
||||
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"title": "xxx-title", "url": urls[0], "tags": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
res = client.get(url_for("index") + "?q=xxx-title")
|
||||
assert urls[0].encode('utf-8') in res.data
|
||||
assert urls[1].encode('utf-8') not in res.data
|
||||
|
||||
|
||||
def test_search_in_tag_limit(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
|
||||
urls = ['https://localhost:12300?first-result=1 tag-one',
|
||||
'https://localhost:5000?second-result=1 tag-two'
|
||||
]
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": "\r\n".join(urls)},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"2 Imported" in res.data
|
||||
|
||||
# By URL
|
||||
|
||||
res = client.get(url_for("index") + "?q=first-res")
|
||||
# Split because of the import tag separation
|
||||
assert urls[0].split(' ')[0].encode('utf-8') in res.data, urls[0].encode('utf-8')
|
||||
assert urls[1].split(' ')[0].encode('utf-8') not in res.data, urls[0].encode('utf-8')
|
||||
|
||||
# By Title
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"title": "xxx-title", "url": urls[0].split(' ')[0], "tags": urls[0].split(' ')[1], "headers": "",
|
||||
'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
res = client.get(url_for("index") + "?q=xxx-title")
|
||||
assert urls[0].split(' ')[0].encode('utf-8') in res.data, urls[0].encode('utf-8')
|
||||
assert urls[1].split(' ')[0].encode('utf-8') not in res.data, urls[0].encode('utf-8')
|
||||
|
||||
@@ -18,7 +18,7 @@ def test_bad_access(client, live_server):
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": 'javascript:alert(document.domain)',
|
||||
"tag": "",
|
||||
"tags": "",
|
||||
"method": "GET",
|
||||
"fetch_backend": "html_requests",
|
||||
"body": ""},
|
||||
@@ -29,7 +29,7 @@ def test_bad_access(client, live_server):
|
||||
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": ' javascript:alert(123)', "tag": ''},
|
||||
data={"url": ' javascript:alert(123)', "tags": ''},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -37,7 +37,7 @@ def test_bad_access(client, live_server):
|
||||
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": '%20%20%20javascript:alert(123)%20%20', "tag": ''},
|
||||
data={"url": '%20%20%20javascript:alert(123)%20%20', "tags": ''},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -46,7 +46,7 @@ def test_bad_access(client, live_server):
|
||||
|
||||
res = client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": ' source:javascript:alert(document.domain)', "tag": ''},
|
||||
data={"url": ' source:javascript:alert(document.domain)', "tags": ''},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -56,7 +56,7 @@ def test_bad_access(client, live_server):
|
||||
|
||||
client.post(
|
||||
url_for("form_quick_watch_add"),
|
||||
data={"url": 'file:///tasty/disk/drive', "tag": ''},
|
||||
data={"url": 'file:///tasty/disk/drive', "tags": ''},
|
||||
follow_redirects=True
|
||||
)
|
||||
time.sleep(1)
|
||||
|
||||
@@ -29,7 +29,7 @@ def test_share_watch(client, live_server):
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"include_filters": include_filters, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import time
|
||||
from flask import url_for
|
||||
from urllib.request import urlopen
|
||||
from .util import set_original_response, set_modified_response, live_server_setup
|
||||
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks
|
||||
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
@@ -42,7 +42,7 @@ def test_check_basic_change_detection_functionality_source(client, live_server):
|
||||
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
assert b'1 watches queued for rechecking.' in res.data
|
||||
|
||||
time.sleep(5)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
# Now something should be ready, indicated by having a 'unviewed' class
|
||||
res = client.get(url_for("index"))
|
||||
@@ -60,7 +60,7 @@ def test_check_basic_change_detection_functionality_source(client, live_server):
|
||||
# `subtractive_selectors` should still work in `source:` type requests
|
||||
def test_check_ignore_elements(client, live_server):
|
||||
set_original_response()
|
||||
time.sleep(2)
|
||||
time.sleep(1)
|
||||
test_url = 'source:'+url_for('test_endpoint', _external=True)
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
@@ -71,14 +71,14 @@ def test_check_ignore_elements(client, live_server):
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
wait_for_all_checks(client)
|
||||
|
||||
#####################
|
||||
# We want <span> and <p> ONLY, but ignore span with .foobar-detection
|
||||
|
||||
client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"include_filters": 'span,p', "url": test_url, "tag": "", "subtractive_selectors": ".foobar-detection", 'fetch_backend': "html_requests"},
|
||||
data={"include_filters": 'span,p', "url": test_url, "tags": "", "subtractive_selectors": ".foobar-detection", 'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import live_server_setup
|
||||
from .util import live_server_setup, wait_for_all_checks
|
||||
|
||||
|
||||
def set_original_ignore_response():
|
||||
@@ -26,13 +26,8 @@ def test_trigger_regex_functionality(client, live_server):

     live_server_setup(live_server)

-    sleep_time_for_fetch_thread = 3
-
     set_original_ignore_response()

-    # Give the endpoint time to spin up
-    time.sleep(1)
-
     # Add our URL to the import page
     test_url = url_for('test_endpoint', _external=True)
     res = client.post(

@@ -43,7 +38,7 @@ def test_trigger_regex_functionality(client, live_server):
     assert b"1 Imported" in res.data

     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # It should report nothing found (just a new one shouldnt have anything)
     res = client.get(url_for("index"))

@@ -57,7 +52,7 @@ def test_trigger_regex_functionality(client, live_server):
               "fetch_backend": "html_requests"},
         follow_redirects=True
     )
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)
     # so that we set the state to 'unviewed' after all the edits
     client.get(url_for("diff_history_page", uuid="first"))

@@ -65,7 +60,7 @@ def test_trigger_regex_functionality(client, live_server):
         f.write("some new noise")

     client.get(url_for("form_watch_checknow"), follow_redirects=True)
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # It should report nothing found (nothing should match the regex)
     res = client.get(url_for("index"))

@@ -75,7 +70,7 @@ def test_trigger_regex_functionality(client, live_server):
         f.write("regex test123<br>\nsomething 123")

     client.get(url_for("form_watch_checknow"), follow_redirects=True)
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)
     res = client.get(url_for("index"))
     assert b'unviewed' in res.data

@@ -26,7 +26,7 @@ def test_check_watch_field_storage(client, live_server):
               "title" : "My title",
               "ignore_text" : "ignore this",
               "url": test_url,
-              "tag": "woohoo",
+              "tags": "woohoo",
               "headers": "curl:foo",
               'fetch_backend': "html_requests"
               },

@@ -89,7 +89,7 @@ def test_check_xpath_filter_utf8(client, live_server):
     time.sleep(1)
     res = client.post(
         url_for("edit_page", uuid="first"),
-        data={"include_filters": filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+        data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
         follow_redirects=True
     )
     assert b"Updated watch." in res.data

@@ -143,7 +143,7 @@ def test_check_xpath_text_function_utf8(client, live_server):
     time.sleep(1)
     res = client.post(
         url_for("edit_page", uuid="first"),
-        data={"include_filters": filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+        data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
         follow_redirects=True
     )
     assert b"Updated watch." in res.data

@@ -189,7 +189,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server):
     # Add our URL to the import page
     res = client.post(
         url_for("edit_page", uuid="first"),
-        data={"include_filters": xpath_filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+        data={"include_filters": xpath_filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
         follow_redirects=True
     )
     assert b"Updated watch." in res.data

@@ -231,7 +231,7 @@ def test_xpath_validation(client, live_server):

     res = client.post(
         url_for("edit_page", uuid="first"),
-        data={"include_filters": "/something horrible", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+        data={"include_filters": "/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
         follow_redirects=True
     )
     assert b"is not a valid XPath expression" in res.data

@@ -261,7 +261,7 @@ def test_check_with_prefix_include_filters(client, live_server):

     res = client.post(
         url_for("edit_page", uuid="first"),
-        data={"include_filters": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+        data={"include_filters": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
         follow_redirects=True
     )

@@ -36,7 +36,7 @@ class TestDiffBuilder(unittest.TestCase):
         output = output.split("\n")
         self.assertIn('(removed) for having learned computerese,', output)
         self.assertIn('(removed) I continue to examine bits, bytes and words', output)
-
+
         #diff_removed
         with open(base_dir + "/test-content/before.txt", 'r') as f:
             previous_version_file_contents = f.read()

@@ -49,7 +49,7 @@ class TestDiffBuilder(unittest.TestCase):
         self.assertIn('(into) xok', output)
         self.assertIn('(into) next-x-ok', output)
         self.assertNotIn('(added) and something new', output)
-
+
         #diff_removed
         with open(base_dir + "/test-content/after-2.txt", 'r') as f:
             newest_version_file_contents = f.read()

@@ -57,9 +57,25 @@ class TestDiffBuilder(unittest.TestCase):
         output = output.split("\n")
         self.assertIn('(removed) for having learned computerese,', output)
         self.assertIn('(removed) I continue to examine bits, bytes and words', output)

+    def test_expected_diff_patch_output(self):
+        base_dir = os.path.dirname(__file__)
+        with open(base_dir + "/test-content/before.txt", 'r') as f:
+            before = f.read()
+        with open(base_dir + "/test-content/after.txt", 'r') as f:
+            after = f.read()
+
+        output = diff.render_diff(previous_version_file_contents=before,
+                                   newest_version_file_contents=after,
+                                   patch_format=True)
+        output = output.split("\n")
+
+        self.assertIn('-ok', output)
+        self.assertIn('+xok', output)
+        self.assertIn('+next-x-ok', output)
+        self.assertIn('+and something new', output)

     # @todo test blocks of changed, blocks of added, blocks of removed

 if __name__ == '__main__':
-    unittest.main()
+    unittest.main()
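The recurring change across these hunks is swapping fixed time.sleep(...) pauses for the wait_for_all_checks(client) helper imported from .util, so each test waits only until the queued rechecks have actually finished rather than a hard-coded number of seconds. A minimal sketch of that polling pattern is below; it is illustrative only, not the project's confirmed implementation — the index route, the 'Checking now' marker and the timing values are assumptions.

    import time

    def wait_for_all_checks_sketch(client, timeout=20, poll_interval=0.3):
        # Poll the app's index page via the Flask test client until no watch
        # reports that it is still being checked, instead of sleeping blindly.
        deadline = time.time() + timeout
        while time.time() < deadline:
            res = client.get("/")  # assumption: index page lists watch status
            if b'Checking now' not in res.data:
                return
            time.sleep(poll_interval)
        raise TimeoutError("watches still being rechecked after %s seconds" % timeout)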