Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-01 15:17:20 +00:00)
Compare commits: 322 commits, 0.39 ... API-interf
.github/ISSUE_TEMPLATE/bug_report.md (new file, 41 lines)
@@ -0,0 +1,41 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**Version**
In the top right area: 0....

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Desktop (please complete the following information):**
 - OS: [e.g. iOS]
 - Browser [e.g. chrome, safari]
 - Version [e.g. 22]

**Smartphone (please complete the following information):**
 - Device: [e.g. iPhone6]
 - OS: [e.g. iOS8.1]
 - Browser [e.g. stock browser, safari]
 - Version [e.g. 22]

**Additional context**
Add any other context about the problem here.
.github/ISSUE_TEMPLATE/feature_request.md (new file, 23 lines)
@@ -0,0 +1,23 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''

---
**Version and OS**
For example, 0.123 on linux/docker

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe the use-case and give concrete real-world examples**
Attach any HTML/JSON, give links to sites, screenshots etc, we are not mind readers

**Additional context**
Add any other context or screenshots about the feature request here.
.github/workflows/containers.yml (new file, 129 lines)
@@ -0,0 +1,129 @@
name: Build and push containers

on:
  # Automatically triggered by a testing workflow passing, but this is only checked when it lands in the `master`/default branch
  # workflow_run:
  #   workflows: ["ChangeDetection.io Test"]
  #   branches: [master]
  #   tags: ['0.*']
  #   types: [completed]

  # Or a new tagged release
  release:
    types: [published, edited]

  push:
    branches:
      - master

jobs:
  metadata:
    runs-on: ubuntu-latest
    steps:
      - name: Show metadata
        run: |
          echo SHA ${{ github.sha }}
          echo github.ref: ${{ github.ref }}
          echo github_ref: $GITHUB_REF
          echo Event name: ${{ github.event_name }}
          echo Ref ${{ github.ref }}
          echo c: ${{ github.event.workflow_run.conclusion }}
          echo r: ${{ github.event.workflow_run }}
          echo tname: "${{ github.event.release.tag_name }}"
          echo headbranch: -${{ github.event.workflow_run.head_branch }}-
          set

  build-push-containers:
    runs-on: ubuntu-latest
    # If the testing workflow has a success, then we build to :latest
    # Or if we are in a tagged release scenario.
    if: ${{ github.event.workflow_run.conclusion == 'success' }} || ${{ github.event.release.tag_name }} != ''
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
          if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi

      - name: Create release metadata
        run: |
          # COPY'ed by Dockerfile into changedetectionio/ of the image, then read by the server in store.py
          echo ${{ github.sha }} > changedetectionio/source.txt
          echo ${{ github.ref }} > changedetectionio/tag.txt

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
        with:
          image: tonistiigi/binfmt:latest
          platforms: all

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to Docker Hub Container Registry
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          install: true
          version: latest
          driver-opts: image=moby/buildkit:master

      # master always builds :latest
      - name: Build and push :latest
        id: docker_build
        if: ${{ github.ref }} == "refs/heads/master"
        uses: docker/build-push-action@v2
        with:
          context: ./
          file: ./Dockerfile
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest,ghcr.io/${{ github.repository }}:latest
          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache

      # A new tagged release is required, which builds :tag
      - name: Build and push :tag
        id: docker_build_tag_release
        if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.')
        uses: docker/build-push-action@v2
        with:
          context: ./
          file: ./Dockerfile
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:${{ github.event.release.tag_name }},ghcr.io/dgtlmoon/changedetection.io:${{ github.event.release.tag_name }}
          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache

      - name: Image digest
        run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}

      - name: Cache Docker layers
        uses: actions/cache@v2
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-
.github/workflows/image-javascript.yml (deleted file, 88 lines)
@@ -1,88 +0,0 @@
name: Javascript/Webdriver support - Test, build and push to Docker Hub :javascript tag

on:
  push:
    branches: [ javascript-browser ]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:

      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
          if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi

      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

      - name: Create release metadata
        run: |
          # COPY'ed by Dockerfile into changedetectionio/ of the image, then read by the server in store.py
          echo ${{ github.sha }} > changedetectionio/source.txt
          echo ${{ github.ref }} > changedetectionio/tag.txt

      - name: Test with pytest
        run: |
          # Each test is totally isolated and performs its own cleanup/reset
          cd changedetectionio; ./run_all_tests.sh

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
        with:
          image: tonistiigi/binfmt:latest
          platforms: all

      - name: Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          install: true
          version: latest
          driver-opts: image=moby/buildkit:master

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: ./
          file: ./Dockerfile
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:javascript-dev
          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache

      - name: Image digest
        run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}

      # failed: Cache service responded with 503
      # - name: Cache Docker layers
      #   uses: actions/cache@v2
      #   with:
      #     path: /tmp/.buildx-cache
      #     key: ${{ runner.os }}-buildx-${{ github.sha }}
      #     restore-keys: |
      #       ${{ runner.os }}-buildx-
.github/workflows/image-tag.yml (deleted file, 92 lines)
@@ -1,92 +0,0 @@
name: Test, build and push tagged release to Docker Hub

on:
  push:
    tags:
      - '*.*'

jobs:
  build:
    runs-on: ubuntu-latest
    steps:

      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9

      - uses: olegtarasov/get-tag@v2.1
        id: tagName

      # with:
      #   tagRegex: "foobar-(.*)" # Optional. Returns specified group text as tag name. Full tag string is returned if regex is not defined.
      #   tagRegexGroup: 1 # Optional. Default is 1.

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
          if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi

      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

      - name: Create release metadata
        run: |
          # COPY'ed by Dockerfile into changedetectionio/ of the image, then read by the server in store.py
          echo ${{ github.sha }} > changedetectionio/source.txt
          echo ${{ github.ref }} > changedetectionio/tag.txt

      - name: Test with pytest
        run: |
          # Each test is totally isolated and performs its own cleanup/reset
          cd changedetectionio; ./run_all_tests.sh

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
        with:
          image: tonistiigi/binfmt:latest
          platforms: all
      - name: Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          install: true
          version: latest
          driver-opts: image=moby/buildkit:master

      - name: tag
        run : echo ${{ github.event.release.tag_name }}

      - name: Build and push tagged version
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: ./
          file: ./Dockerfile
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:${{ steps.tagName.outputs.tag }}
          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
        env:
          SOURCE_NAME: ${{ steps.branch_name.outputs.SOURCE_NAME }}
          SOURCE_BRANCH: ${{ steps.branch_name.outputs.SOURCE_BRANCH }}
          SOURCE_TAG: ${{ steps.branch_name.outputs.SOURCE_TAG }}

      - name: Image digest
        run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}
.github/workflows/image.yml (deleted file, 88 lines)
@@ -1,88 +0,0 @@
name: Test, build and push to Docker Hub

on:
  push:
    branches: [ master, arm-build ]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:

      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
          if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi

      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

      - name: Create release metadata
        run: |
          # COPY'ed by Dockerfile into changedetectionio/ of the image, then read by the server in store.py
          echo ${{ github.sha }} > changedetectionio/source.txt
          echo ${{ github.ref }} > changedetectionio/tag.txt

      - name: Test with pytest
        run: |
          # Each test is totally isolated and performs its own cleanup/reset
          cd changedetectionio; ./run_all_tests.sh

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
        with:
          image: tonistiigi/binfmt:latest
          platforms: all

      - name: Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          install: true
          version: latest
          driver-opts: image=moby/buildkit:master

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: ./
          file: ./Dockerfile
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io
          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache

      - name: Image digest
        run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}

      # failed: Cache service responded with 503
      # - name: Cache Docker layers
      #   uses: actions/cache@v2
      #   with:
      #     path: /tmp/.buildx-cache
      #     key: ${{ runner.os }}-buildx-${{ github.sha }}
      #     restore-keys: |
      #       ${{ runner.os }}-buildx-
.github/workflows/pypi.yml (new file, 44 lines)
@@ -0,0 +1,44 @@
name: PyPi Test and Push tagged release

# Triggers the workflow on push or pull request events
on:
  workflow_run:
    workflows: ["ChangeDetection.io Test"]
    tags: '*.*'
    types: [completed]


jobs:
  test-build:
    runs-on: ubuntu-latest
    steps:

      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9

      # - name: Install dependencies
      #   run: |
      #     python -m pip install --upgrade pip
      #     pip install flake8 pytest
      #     if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
      #     if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi

      - name: Test that pip builds without error
        run: |
          pip3 --version
          python3 -m pip install wheel
          python3 setup.py bdist_wheel
          python3 -m pip install dist/changedetection.io-*-none-any.whl --force
          changedetection.io -d /tmp -p 10000 &
          sleep 3
          curl http://127.0.0.1:10000/static/styles/pure-min.css >/dev/null
          killall -9 changedetection.io

      # https://github.com/docker/build-push-action/blob/master/docs/advanced/test-before-push.md ?
      # https://github.com/docker/buildx/issues/59 ? Needs to be one platform?

      # https://github.com/docker/buildx/issues/495#issuecomment-918925854
      #if: ${{ github.event_name == 'release'}}
.github/workflows/test-only.yml (23 lines changed)
@@ -4,7 +4,7 @@ name: ChangeDetection.io Test
on: [push, pull_request]

jobs:
  build:
  test-build:
    runs-on: ubuntu-latest
    steps:

@@ -14,6 +14,9 @@ jobs:
        with:
          python-version: 3.9

      - name: Show env vars
        run: set

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
@@ -27,20 +30,16 @@ jobs:
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

      - name: Unit tests
        run: |
          python3 -m unittest changedetectionio.tests.unit.test_notification_diff

      - name: Test with pytest
        run: |
          # Each test is totally isolated and performs its own cleanup/reset
          cd changedetectionio; ./run_all_tests.sh

      - name: Test that pip builds without error
        run: |
          pip3 --version
          python3 -m pip install wheel
          python3 setup.py bdist_wheel
          python3 -m pip install dist/changedetection.io-*-none-any.whl --force
          changedetection.io -d /tmp -p 10000 &
          sleep 3
          curl http://127.0.0.1:10000/static/styles/pure-min.css >/dev/null
          killall -9 changedetection.io

      # https://github.com/docker/build-push-action/blob/master/docs/advanced/test-before-push.md ?
      # https://github.com/docker/buildx/issues/59 ? Needs to be one platform?

      # https://github.com/docker/buildx/issues/495#issuecomment-918925854
.gitignore (3 lines changed)
@@ -7,3 +7,6 @@ __pycache__
.pytest_cache
build
dist
venv
*.egg-info*
.vscode/settings.json
CONTRIBUTING.md (new file, 15 lines)
@@ -0,0 +1,15 @@
Contributing is always welcome!

I am not a professional Flask developer; if you know a better way that something can be done, please let me know!

Otherwise, it's always best to PR into the `dev` branch.

Please be sure that all new functionality has a matching test!

Use `pytest` to validate/test, you can run the existing tests as `pytest tests/test_notifications.py` for example

```
pip3 install -r requirements-dev.txt
```

this is from https://github.com/dgtlmoon/changedetection.io/blob/master/requirements-dev.txt
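If you prefer to drive the same test run from Python rather than the pytest CLI, a minimal sketch (the test file path is the one mentioned in CONTRIBUTING.md above; treat it as an illustration, not project tooling):

```python
import pytest  # provided by requirements-dev.txt

# Equivalent to running `pytest tests/test_notifications.py` from the changedetectionio/ directory.
exit_code = pytest.main(['tests/test_notifications.py'])
raise SystemExit(exit_code)
```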
Dockerfile (10 lines changed)
@@ -20,6 +20,11 @@ COPY requirements.txt /requirements.txt

RUN pip install --target=/dependencies -r /requirements.txt

# Playwright is an alternative to Selenium
# Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing
RUN pip install --target=/dependencies playwright~=1.20 \
    || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."

# Final image stage
FROM python:3.8-slim

@@ -42,10 +47,15 @@ ENV PYTHONUNBUFFERED=1

RUN [ ! -d "/datastore" ] && mkdir /datastore

# Re #80, sets SECLEVEL=1 in openssl.conf to allow monitoring sites with weak/old cipher suites
RUN sed -i 's/^CipherString = .*/CipherString = DEFAULT@SECLEVEL=1/' /etc/ssl/openssl.cnf

# Copy modules over to the final image and add their dir to PYTHONPATH
COPY --from=builder /dependencies /usr/local
ENV PYTHONPATH=/usr/local

EXPOSE 5000

# The actual flask app
COPY changedetectionio /app/changedetectionio
# The eventlet server wrapper
@@ -1,4 +1,7 @@
recursive-include changedetectionio/templates *
recursive-include changedetectionio/static *
recursive-include changedetectionio/model *
include changedetection.py
global-exclude *.pyc
global-exclude *.pyc
global-exclude node_modules
global-exclude venv
Procfile (new file, 1 line)
@@ -0,0 +1 @@
web: python3 ./changedetection.py -C -d ./datastore -p $PORT
@@ -16,6 +16,13 @@ Live your data-life *pro-actively* instead of *re-actively*, do not rely on mani

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />

**Get your own private instance now! Let us host it for you!**

[**Try our $6.99/month subscription - unlimited checks, watches and notifications!**](https://lemonade.changedetection.io/start), choose from different geographical locations, let us handle everything for you.


#### Example use cases

Know when ...
@@ -58,14 +65,3 @@ Then visit http://127.0.0.1:5000 , You should now be able to access the UI.

See https://github.com/dgtlmoon/changedetection.io for more information.


### Support us

Do you use changedetection.io to make money? does it save you time or money? Does it make your life easier? less stressful? Remember, we write this software when we should be doing actual paid work, we have to buy food and pay rent just like you.

Please support us, even small amounts help a LOT.

BTC `1PLFN327GyUarpJd7nVe7Reqg9qHx5frNn`

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/btc-support.png" style="max-width:50%;" alt="Support us!" />
README.md (148 lines changed)
@@ -1,45 +1,65 @@
# changedetection.io

<a href="https://hub.docker.com/r/dgtlmoon/changedetection.io" target="_blank" title="Change detection docker hub">
<img src="https://img.shields.io/docker/pulls/dgtlmoon/changedetection.io" alt="Docker Pulls"/>
</a>
<a href="https://hub.docker.com/r/dgtlmoon/changedetection.io" target="_blank" title="Change detection docker hub">
<img src="https://img.shields.io/github/v/release/dgtlmoon/changedetection.io" alt="Change detection latest tag version"/>
</a>
[![Release Version][release-shield]][release-link] [![Docker Pulls][docker-pulls]][docker-link] [![License][license-shield]](LICENSE.md)

## Self-hosted open source change monitoring of web pages.


## Self-Hosted, Open Source, Change Monitoring of Web Pages

_Know when web pages change! Stay ontop of new information!_

Live your data-life *pro-actively* instead of *re-actively*, do not rely on manipulative social media for consuming important information.
Live your data-life *pro-actively* instead of *re-actively*.

Open source web page monitoring, notification and change detection.
Free, Open-source web page monitoring, notification and change detection. Don't have time? [**Try our $6.99/month subscription - unlimited checks and watches!**](https://lemonade.changedetection.io/start)

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start)

**Get your own private instance now! Let us host it for you!**

[**Try our $6.99/month subscription - unlimited checks and watches!**](https://lemonade.changedetection.io/start) , _half the price of other website change monitoring services and comes with unlimited watches & checks!_

- Automatic Updates, Automatic Backups, No Heroku "paused application", don't miss a change!
- Javascript browser included
- Unlimited checks and watches!

#### Example use cases

Know when ...

- Government department updates (changes are often only on their websites)
- Local government news (changes are often only on their websites)
- Products and services have a change in pricing
- Governmental department updates (changes are often only on their websites)
- New software releases, security advisories when you're not on their mailing list.
- Festivals with changes
- Realestate listing changes
- COVID related news from government websites
- University/organisation news from their website
- Detect and monitor changes in JSON API responses
- API monitoring and alerting
- JSON API monitoring and alerting
- Changes in legal and other documents
- Trigger API calls via notifications when text appears on a website
- Glue together APIs using the JSON filter and JSON notifications
- Create RSS feeds based on changes in web content
- Monitor HTML source code for unexpected changes, strengthen your PCI compliance
- You have a very sensitive list of URLs to watch and you do _not_ want to use the paid alternatives. (Remember, _you_ are the product)

_Need an actual Chrome runner with Javascript support? We support fetching via WebDriver!_

**Get monitoring now! super simple, one command!**
## Screenshots

Run the python code on your own machine by cloning this repository, or with <a href="https://docs.docker.com/get-docker/">docker</a> and/or <a href="https://www.digitalocean.com/community/tutorial_collections/how-to-install-docker-compose">docker-compose</a>
Examining differences in content.

**Docker**
<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />

With Docker composer, just clone this repository and
Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/

## Installation

### Docker

With Docker composer, just clone this repository and..
```bash
$ docker-compose up -d
```
@@ -48,7 +68,14 @@ Docker standalone
$ docker run -d --restart always -p "127.0.0.1:5000:5000" -v datastore-volume:/datastore --name changedetection.io dgtlmoon/changedetection.io
```

**Python PIP**
### Windows

See the install instructions at the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Microsoft-Windows

### Python Pip

Check out our pypi page https://pypi.org/project/changedetection.io/

```bash
$ pip3 install changedetection.io
$ changedetection.io -d /path/to/empty/data/dir -p 5000
@@ -58,16 +85,31 @@ Then visit http://127.0.0.1:5000 , You should now be able to access the UI.

_Now with per-site configurable support for using a fast built in HTTP fetcher or use a Chrome based fetcher for monitoring of JavaScript websites!_

### Screenshots
## Updating changedetection.io

Examining differences in content.
### Docker
```
docker pull dgtlmoon/changedetection.io
docker kill $(docker ps -a|grep changedetection.io|awk '{print $1}')
docker rm $(docker ps -a|grep changedetection.io|awk '{print $1}')
docker run -d --restart always -p "127.0.0.1:5000:5000" -v datastore-volume:/datastore --name changedetection.io dgtlmoon/changedetection.io
```

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />
### docker-compose

Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/
```bash
docker-compose pull && docker-compose up -d
```

### Notifications
See the wiki for more information https://github.com/dgtlmoon/changedetection.io/wiki


## Filters
XPath, JSONPath and CSS support comes baked in! You can be as specific as you need, use XPath exported from various XPath element query creation tools.

(We support LXML re:test, re:math and re:replace.)

## Notifications

ChangeDetection.io supports a massive amount of notifications (including email, office365, custom APIs, etc) when a web-page has a change detected thanks to the <a href="https://github.com/caronc/apprise">apprise</a> library.
Simply set one or more notification URL's in the _[edit]_ tab of that watch.
@@ -85,13 +127,13 @@ Just some examples
    json://someserver.com/custom-api
    syslog://

<a href="https://github.com/caronc/apprise">And everything else in this list!</a>
<a href="https://github.com/caronc/apprise#popular-notification-services">And everything else in this list!</a>

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/screenshot-notifications.png" style="max-width:100%;" alt="Self-hosted web page change monitoring notifications" title="Self-hosted web page change monitoring notifications" />

Now you can also customise your notification content!
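As a rough illustration of what the apprise library mentioned above does with one of these notification URLs, here is a minimal hedged sketch using apprise directly; the URL, title and body are placeholders, not the values changedetection.io itself sends:

```python
import apprise  # pip3 install apprise

# Build an Apprise object and register a notification URL (placeholder, same style as the examples above).
apobj = apprise.Apprise()
apobj.add('json://someserver.com/custom-api')

# changedetection.io fills in its own title/body from the watch and the detected change.
apobj.notify(
    title='Change detected',
    body='The watched page has changed.',
)
```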
### JSON API Monitoring
## JSON API Monitoring

Detect changes and monitor data in JSON API's by using the built-in JSONPath selectors as a filter / selector.

@@ -101,7 +143,7 @@ This will re-parse the JSON and apply formatting to the text, making it super ea



#### Parse JSON embedded in HTML!
### Parse JSON embedded in HTML!

When you enable a `json:` filter, you can even automatically extract and parse embedded JSON inside a HTML page! Amazingly handy for sites that build content based on JSON, such as many e-commerce websites.

@@ -115,37 +157,37 @@ When you enable a `json:` filter, you can even automatically extract and parse e

`json:$.price` would give `23.50`, or you can extract the whole structure
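For illustration, this is roughly how a `json:$.price` expression resolves against the embedded product JSON described above. The sketch uses the jsonpath-ng library as a stand-in for whatever JSONPath engine the application uses internally, and the sample data is a placeholder:

```python
from jsonpath_ng import parse  # pip3 install jsonpath-ng

# Simplified stand-in for the JSON embedded in a product page.
page_json = {"name": "Some product", "price": 23.50}

# `json:$.price` corresponds to the JSONPath expression '$.price'.
matches = parse('$.price').find(page_json)
print(matches[0].value)  # 23.5
```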
### Proxy
## Proxy configuration

A proxy for ChangeDetection.io can be configured by setting environment the
`HTTP_PROXY`, `HTTPS_PROXY` variables, examples are also in the `docker-compose.yml`
See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration

`NO_PROXY` exclude list can be specified by following `"localhost,192.168.0.0/24"`
## Raspberry Pi support?

as `docker run` with `-e`

```
docker run -d --restart always -e HTTPS_PROXY="socks5h://10.10.1.10:1080" -p "127.0.0.1:5000:5000" -v datastore-volume:/datastore --name changedetection.io dgtlmoon/changedetection.io
```

With `docker-compose`, see the `Proxy support example` in <a href="https://github.com/dgtlmoon/changedetection.io/blob/master/docker-compose.yml">docker-compose.yml</a>.

For more information see https://docs.python-requests.org/en/master/user/advanced/#proxies

This proxy support also extends to the notifications https://github.com/caronc/apprise/issues/387#issuecomment-841718867
Raspberry Pi and linux/arm/v6 linux/arm/v7 arm64 devices are supported! See the wiki for [details](https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver)


### RaspberriPi support?

RaspberriPi and linux/arm/v6 linux/arm/v7 arm64 devices are supported!
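The `HTTP_PROXY`/`HTTPS_PROXY`/`NO_PROXY` settings described in the proxy section above are ultimately honoured by the requests library that the built-in HTTP fetcher is based on (see the requests proxies documentation linked there). A minimal hedged sketch of the equivalent configuration from Python; the proxy address is a placeholder:

```python
import os
import requests

# requests picks these up automatically when they are set in the environment ...
os.environ['HTTPS_PROXY'] = 'socks5h://10.10.1.10:1080'  # placeholder proxy; socks5h:// needs the requests[socks] extra
os.environ['NO_PROXY'] = 'localhost,192.168.0.0/24'

# ... or proxies can be passed explicitly per request.
proxies = {'https': 'socks5h://10.10.1.10:1080'}
response = requests.get('https://example.com', proxies=proxies, timeout=10)
print(response.status_code)
```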
### Support us
## Support us

Do you use changedetection.io to make money? does it save you time or money? Does it make your life easier? less stressful? Remember, we write this software when we should be doing actual paid work, we have to buy food and pay rent just like you.

Please support us, even small amounts help a LOT.

BTC `1PLFN327GyUarpJd7nVe7Reqg9qHx5frNn`
Firstly, consider taking out a [change detection monthly subscription - unlimited checks and watches](https://lemonade.changedetection.io/start) , even if you don't use it, you still get the warm fuzzy feeling of helping out the project. (And who knows, you might just use it!)

Or directly donate an amount PayPal [](https://www.paypal.com/donate/?hosted_button_id=7CP6HR9ZCNDYJ)

Or BTC `1PLFN327GyUarpJd7nVe7Reqg9qHx5frNn`

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/btc-support.png" style="max-width:50%;" alt="Support us!" />

## Commercial Support

I offer commercial support, this software is depended on by network security, aerospace , data-science and data-journalist professionals just to name a few, please reach out at dgtlmoon@gmail.com for any enquiries, I am more than glad to work with your organisation to further the possibilities of what can be done with changedetection.io


[release-shield]: https://img.shields.io:/github/v/release/dgtlmoon/changedetection.io?style=for-the-badge
[docker-pulls]: https://img.shields.io/docker/pulls/dgtlmoon/changedetection.io?style=for-the-badge
[test-shield]: https://github.com/dgtlmoon/changedetection.io/actions/workflows/test-only.yml/badge.svg?branch=master

[license-shield]: https://img.shields.io/github/license/dgtlmoon/changedetection.io.svg?style=for-the-badge
[release-link]: https://github.com/dgtlmoon.com/changedetection.io/releases
[docker-link]: https://hub.docker.com/r/dgtlmoon/changedetection.io
app.json (new file, 21 lines)
@@ -0,0 +1,21 @@
{
  "name": "ChangeDetection.io",
  "description": "The best and simplest self-hosted open source website change detection monitoring and notification service.",
  "keywords": [
    "changedetection",
    "website monitoring"
  ],
  "repository": "https://github.com/dgtlmoon/changedetection.io",
  "success_url": "/",
  "scripts": {
  },
  "env": {
  },
  "formation": {
    "web": {
      "quantity": 1,
      "size": "free"
    }
  },
  "image": "heroku/python"
}
@@ -1,97 +1,11 @@
#!/usr/bin/python3

# Launch as a eventlet.wsgi server instance.

import getopt
import os
import sys

import eventlet
import eventlet.wsgi
import changedetectionio

from changedetectionio import store

def main():
    ssl_mode = False
    port = os.environ.get('PORT') or 5000
    do_cleanup = False

    # Must be absolute so that send_from_directory doesnt try to make it relative to backend/
    datastore_path = os.path.join(os.getcwd(), "datastore")

    try:
        opts, args = getopt.getopt(sys.argv[1:], "csd:p:", "port")
    except getopt.GetoptError:
        print('backend.py -s SSL enable -p [port] -d [datastore path]')
        sys.exit(2)

    for opt, arg in opts:
        # if opt == '--purge':
        #     Remove history, the actual files you need to delete manually.
        #     for uuid, watch in datastore.data['watching'].items():
        #         watch.update({'history': {}, 'last_checked': 0, 'last_changed': 0, 'previous_md5': None})

        if opt == '-s':
            ssl_mode = True

        if opt == '-p':
            port = int(arg)

        if opt == '-d':
            datastore_path = arg

        # Cleanup (remove text files that arent in the index)
        if opt == '-c':
            do_cleanup = True

    # isnt there some @thingy to attach to each route to tell it, that this route needs a datastore
    app_config = {'datastore_path': datastore_path}

    if not os.path.isdir(app_config['datastore_path']):
        print ("ERROR: Directory path for the datastore '{}' does not exist, cannot start, please make sure the directory exists.\n"
               "Alternatively, use the -d parameter.".format(app_config['datastore_path']),file=sys.stderr)
        sys.exit(2)

    datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=changedetectionio.__version__)
    app = changedetectionio.changedetection_app(app_config, datastore)

    # Go into cleanup mode
    if do_cleanup:
        datastore.remove_unused_snapshots()

    app.config['datastore_path'] = datastore_path


    @app.context_processor
    def inject_version():
        return dict(right_sticky="v{}".format(datastore.data['version_tag']),
                    new_version_available=app.config['NEW_VERSION_AVAILABLE'],
                    has_password=datastore.data['settings']['application']['password'] != False
                    )

    # Proxy sub-directory support
    # Set environment var USE_X_SETTINGS=1 on this script
    # And then in your proxy_pass settings
    #
    # proxy_set_header Host "localhost";
    # proxy_set_header X-Forwarded-Prefix /app;

    if os.getenv('USE_X_SETTINGS'):
        print ("USE_X_SETTINGS is ENABLED\n")
        from werkzeug.middleware.proxy_fix import ProxyFix
        app.wsgi_app = ProxyFix(app.wsgi_app, x_prefix=1, x_host=1)

    if ssl_mode:
        # @todo finalise SSL config, but this should get you in the right direction if you need it.
        eventlet.wsgi.server(eventlet.wrap_ssl(eventlet.listen(('', port)),
                                               certfile='cert.pem',
                                               keyfile='privkey.pem',
                                               server_side=True), app)

    else:
        eventlet.wsgi.server(eventlet.listen(('', int(port))), app)
# Entry-point for running from the CLI when not installed via Pip, Pip will handle the console_scripts entry_points's from setup.py
# It's recommended to use `pip3 install changedetection.io` and start with `changedetection.py` instead, it will be linked to your global path.
# or Docker.
# Read more https://github.com/dgtlmoon/changedetection.io/wiki

from changedetectionio import changedetection

if __name__ == '__main__':
    main()
    changedetection.main()
changedetectionio/.gitignore (new file, 1 line)
@@ -0,0 +1 @@
test-datastore
File diff suppressed because it is too large
changedetectionio/api/__init__.py (new file, empty)
changedetectionio/api/api_v1.py (new file, 125 lines)
@@ -0,0 +1,125 @@
from flask_restful import abort, Resource
from flask import request, make_response
import validators
from . import auth


# https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html

class Watch(Resource):
    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']
        self.update_q = kwargs['update_q']

    # Get information about a single watch, excluding the history list (can be large)
    # curl http://localhost:4000/api/v1/watch/<string:uuid>
    # ?recheck=true
    @auth.check_token
    def get(self, uuid):
        from copy import deepcopy
        watch = deepcopy(self.datastore.data['watching'].get(uuid))
        if not watch:
            abort(404, message='No watch exists with the UUID of {}'.format(uuid))

        if request.args.get('recheck'):
            self.update_q.put(uuid)
            return "OK", 200

        # Return without history, get that via another API call
        watch['history_n'] = len(watch['history'])
        del (watch['history'])
        return watch

    @auth.check_token
    def delete(self, uuid):
        if not self.datastore.data['watching'].get(uuid):
            abort(400, message='No watch exists with the UUID of {}'.format(uuid))

        self.datastore.delete(uuid)
        return 'OK', 204


class WatchHistory(Resource):
    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']

    # Get a list of available history for a watch by UUID
    # curl http://localhost:4000/api/v1/watch/<string:uuid>/history
    def get(self, uuid):
        watch = self.datastore.data['watching'].get(uuid)
        if not watch:
            abort(404, message='No watch exists with the UUID of {}'.format(uuid))
        return watch['history'], 200


class WatchSingleHistory(Resource):
    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']

    # Read a given history snapshot and return its content
    # <string:timestamp> or "latest"
    # curl http://localhost:4000/api/v1/watch/<string:uuid>/history/<int:timestamp>
    @auth.check_token
    def get(self, uuid, timestamp):
        watch = self.datastore.data['watching'].get(uuid)
        if not watch:
            abort(404, message='No watch exists with the UUID of {}'.format(uuid))

        if not len(watch['history']):
            abort(404, message='Watch found but no history exists for the UUID {}'.format(uuid))

        if timestamp == 'latest':
            timestamp = list(watch['history'].keys())[-1]

        with open(watch['history'][timestamp], 'r') as f:
            content = f.read()

        response = make_response(content, 200)
        response.mimetype = "text/plain"
        return response


class CreateWatch(Resource):
    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']
        self.update_q = kwargs['update_q']

    @auth.check_token
    def post(self):
        # curl http://localhost:4000/api/v1/watch -H "Content-Type: application/json" -d '{"url": "https://my-nice.com", "tag": "one, two" }'
        json_data = request.get_json()
        tag = json_data['tag'].strip() if json_data.get('tag') else ''

        if not validators.url(json_data['url'].strip()):
            return "Invalid or unsupported URL", 400

        extras = {'title': json_data['title'].strip()} if json_data.get('title') else {}

        new_uuid = self.datastore.add_watch(url=json_data['url'].strip(), tag=tag, extras=extras)
        self.update_q.put(new_uuid)
        return {'uuid': new_uuid}, 201

    # Return concise list of available watches and some very basic info
    # curl http://localhost:4000/api/v1/watch|python -mjson.tool
    # ?recheck_all=1 to recheck all
    @auth.check_token
    def get(self):
        list = {}
        for k, v in self.datastore.data['watching'].items():
            list[k] = {'url': v['url'],
                       'title': v['title'],
                       'last_checked': v['last_checked'],
                       'last_changed': v['last_changed'],
                       'last_error': v['last_error']}

        if request.args.get('recheck_all'):
            for uuid in self.datastore.data['watching'].keys():
                self.update_q.put(uuid)
            return {'status': "OK"}, 200

        return list, 200
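Putting the endpoints above together, here is a hedged client-side sketch using the requests library. The base URL follows the curl comments in api_v1.py, while the API key and URLs are placeholders; the `x-api-key` header is only enforced when `api_access_token_enabled` is set (see auth.py below):

```python
import requests

BASE = 'http://localhost:4000/api/v1'              # placeholder, matches the curl comments above
HEADERS = {'x-api-key': 'YOUR-API-ACCESS-TOKEN'}   # placeholder token, checked by auth.check_token

# Create a watch (CreateWatch.post) - returns {'uuid': ...} with HTTP 201.
r = requests.post(f'{BASE}/watch',
                  json={'url': 'https://my-nice.com', 'tag': 'one, two'},
                  headers=HEADERS)
uuid = r.json()['uuid']

# Fetch its details without the (potentially large) history list (Watch.get).
watch = requests.get(f'{BASE}/watch/{uuid}', headers=HEADERS).json()
print(watch['history_n'])

# Read the latest snapshot text (WatchSingleHistory.get accepts a timestamp or "latest").
latest = requests.get(f'{BASE}/watch/{uuid}/history/latest', headers=HEADERS)
print(latest.text)
```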
changedetectionio/api/auth.py (new file, 33 lines)
@@ -0,0 +1,33 @@
from flask import request, make_response, jsonify
from functools import wraps


# Simple API auth key comparison
# @todo - Maybe short lived token in the future?

def check_token(f):
    @wraps(f)
    def decorated(*args, **kwargs):
        datastore = args[0].datastore

        config_api_token_enabled = datastore.data['settings']['application'].get('api_access_token_enabled')
        if not config_api_token_enabled:
            return

        try:
            api_key_header = request.headers['x-api-key']
        except KeyError:
            return make_response(
                jsonify("No authorization x-api-key header."), 403
            )

        config_api_token = datastore.data['settings']['application'].get('api_access_token')

        if api_key_header != config_api_token:
            return make_response(
                jsonify("Invalid access - API key invalid."), 403
            )

        return f(*args, **kwargs)

    return decorated
114
changedetectionio/changedetection.py
Executable file
114
changedetectionio/changedetection.py
Executable file
@@ -0,0 +1,114 @@
#!/usr/bin/python3

# Launch as a eventlet.wsgi server instance.

import getopt
import os
import sys

import eventlet
import eventlet.wsgi
from . import store, changedetection_app, content_fetcher
from . import __version__

def main():
    ssl_mode = False
    host = ''
    port = os.environ.get('PORT') or 5000
    do_cleanup = False
    datastore_path = None

    # On Windows, create and use a default path.
    if os.name == 'nt':
        datastore_path = os.path.expandvars(r'%APPDATA%\changedetection.io')
        os.makedirs(datastore_path, exist_ok=True)
    else:
        # Must be absolute so that send_from_directory doesnt try to make it relative to backend/
        datastore_path = os.path.join(os.getcwd(), "../datastore")

    try:
        opts, args = getopt.getopt(sys.argv[1:], "Ccsd:h:p:", "port")
    except getopt.GetoptError:
        print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path]')
        sys.exit(2)

    create_datastore_dir = False

    for opt, arg in opts:
        # if opt == '--purge':
        #    Remove history, the actual files you need to delete manually.
        #    for uuid, watch in datastore.data['watching'].items():
        #        watch.update({'history': {}, 'last_checked': 0, 'last_changed': 0, 'previous_md5': None})

        if opt == '-s':
            ssl_mode = True

        if opt == '-h':
            host = arg

        if opt == '-p':
            port = int(arg)

        if opt == '-d':
            datastore_path = arg

        # Cleanup (remove text files that arent in the index)
        if opt == '-c':
            do_cleanup = True

        # Create the datadir if it doesnt exist
        if opt == '-C':
            create_datastore_dir = True

    # isnt there some @thingy to attach to each route to tell it, that this route needs a datastore
    app_config = {'datastore_path': datastore_path}

    if not os.path.isdir(app_config['datastore_path']):
        if create_datastore_dir:
            os.mkdir(app_config['datastore_path'])
        else:
            print(
                "ERROR: Directory path for the datastore '{}' does not exist, cannot start, please make sure the directory exists or specify a directory with the -d option.\n"
                "Or use the -C parameter to create the directory.".format(app_config['datastore_path']), file=sys.stderr)
            sys.exit(2)

    datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=__version__)
    app = changedetection_app(app_config, datastore)

    # Go into cleanup mode
    if do_cleanup:
        datastore.remove_unused_snapshots()

    app.config['datastore_path'] = datastore_path

    @app.context_processor
    def inject_version():
        return dict(right_sticky="v{}".format(datastore.data['version_tag']),
                    new_version_available=app.config['NEW_VERSION_AVAILABLE'],
                    has_password=datastore.data['settings']['application']['password'] != False
                    )

    # Proxy sub-directory support
    # Set environment var USE_X_SETTINGS=1 on this script
    # And then in your proxy_pass settings
    #
    # proxy_set_header Host "localhost";
    # proxy_set_header X-Forwarded-Prefix /app;

    if os.getenv('USE_X_SETTINGS'):
        print ("USE_X_SETTINGS is ENABLED\n")
        from werkzeug.middleware.proxy_fix import ProxyFix
        app.wsgi_app = ProxyFix(app.wsgi_app, x_prefix=1, x_host=1)

    if ssl_mode:
        # @todo finalise SSL config, but this should get you in the right direction if you need it.
        eventlet.wsgi.server(eventlet.wrap_ssl(eventlet.listen((host, port)),
                                               certfile='cert.pem',
                                               keyfile='privkey.pem',
                                               server_side=True), app)

    else:
        eventlet.wsgi.server(eventlet.listen((host, int(port))), app)
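As a side note, a small standalone sketch of how the "Ccsd:h:p:" option string above behaves; the argument values are made up:

    import getopt

    # -C, -c and -s are boolean flags; -d, -h and -p each expect a value.
    argv = ['-C', '-d', '/tmp/datastore', '-h', '0.0.0.0', '-p', '5000']   # made-up example
    opts, args = getopt.getopt(argv, "Ccsd:h:p:")
    print(dict(opts))
    # -> {'-C': '', '-d': '/tmp/datastore', '-h': '0.0.0.0', '-p': '5000'}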
@@ -1,31 +1,60 @@
|
||||
import os
|
||||
import time
|
||||
from abc import ABC, abstractmethod
|
||||
from selenium import webdriver
|
||||
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
|
||||
from selenium.common.exceptions import WebDriverException
|
||||
import urllib3.exceptions
|
||||
|
||||
import chardet
|
||||
import os
|
||||
import requests
|
||||
import time
|
||||
import sys
|
||||
|
||||
class EmptyReply(Exception):
|
||||
def __init__(self, status_code, url):
|
||||
# Set this so we can use it in other parts of the app
|
||||
self.status_code = status_code
|
||||
self.url = url
|
||||
return
|
||||
pass
|
||||
|
||||
class ReplyWithContentButNoText(Exception):
|
||||
def __init__(self, status_code, url):
|
||||
# Set this so we can use it in other parts of the app
|
||||
self.status_code = status_code
|
||||
self.url = url
|
||||
return
|
||||
pass
|
||||
|
||||
|
||||
class Fetcher():
|
||||
error = None
|
||||
status_code = None
|
||||
content = None # Should be bytes?
|
||||
content = None
|
||||
headers = None
|
||||
# Will be needed in the future by the VisualSelector, always get this where possible.
|
||||
screenshot = False
|
||||
fetcher_description = "No description"
|
||||
system_http_proxy = os.getenv('HTTP_PROXY')
|
||||
system_https_proxy = os.getenv('HTTPS_PROXY')
|
||||
|
||||
fetcher_description ="No description"
|
||||
# Time ONTOP of the system defined env minimum time
|
||||
render_extract_delay=0
|
||||
|
||||
@abstractmethod
|
||||
def get_error(self):
|
||||
return self.error
|
||||
|
||||
@abstractmethod
|
||||
def run(self, url, timeout, request_headers):
|
||||
def run(self,
|
||||
url,
|
||||
timeout,
|
||||
request_headers,
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False):
|
||||
# Should set self.error, self.status_code and self.content
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def quit(self):
|
||||
return
|
||||
|
||||
@abstractmethod
|
||||
def get_last_status_code(self):
|
||||
return self.status_code
|
||||
@@ -35,93 +64,278 @@ class Fetcher():
|
||||
def is_ready(self):
|
||||
return True
|
||||
|
||||
|
||||
# Maybe for the future, each fetcher provides its own diff output, could be used for text, image
|
||||
# the current one would return javascript output (as we use JS to generate the diff)
|
||||
#
|
||||
# Returns tuple(mime_type, stream)
|
||||
# @abstractmethod
|
||||
# def return_diff(self, stream_a, stream_b):
|
||||
# return
|
||||
|
||||
def available_fetchers():
|
||||
import inspect
|
||||
from changedetectionio import content_fetcher
|
||||
p=[]
|
||||
for name, obj in inspect.getmembers(content_fetcher):
|
||||
if inspect.isclass(obj):
|
||||
# @todo html_ is maybe better as fetcher_ or something
|
||||
# In this case, make sure to edit the default one in store.py and fetch_site_status.py
|
||||
if "html_" in name:
|
||||
t=tuple([name,obj.fetcher_description])
|
||||
p.append(t)
|
||||
# See the if statement at the bottom of this file for how we switch between playwright and webdriver
|
||||
import inspect
|
||||
p = []
|
||||
for name, obj in inspect.getmembers(sys.modules[__name__], inspect.isclass):
|
||||
if inspect.isclass(obj):
|
||||
# @todo html_ is maybe better as fetcher_ or something
|
||||
# In this case, make sure to edit the default one in store.py and fetch_site_status.py
|
||||
if name.startswith('html_'):
|
||||
t = tuple([name, obj.fetcher_description])
|
||||
p.append(t)
|
||||
|
||||
return p
|
||||
return p
|
||||
|
||||
class html_webdriver(Fetcher):
|
||||
fetcher_description = "WebDriver Chrome/Javascript"
|
||||
|
||||
class base_html_playwright(Fetcher):
|
||||
fetcher_description = "Playwright {}/Javascript".format(
|
||||
os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').capitalize()
|
||||
)
|
||||
if os.getenv("PLAYWRIGHT_DRIVER_URL"):
|
||||
fetcher_description += " via '{}'".format(os.getenv("PLAYWRIGHT_DRIVER_URL"))
|
||||
|
||||
browser_type = ''
|
||||
command_executor = ''
|
||||
|
||||
def __init__(self):
|
||||
self.command_executor = os.getenv("WEBDRIVER_URL",'http://browser-chrome:4444/wd/hub')
|
||||
# Configs for Proxy setup
|
||||
# In the ENV vars, is prefixed with "playwright_proxy_", so it is for example "playwright_proxy_server"
|
||||
playwright_proxy_settings_mappings = ['bypass', 'server', 'username', 'password']
|
||||
|
||||
def run(self, url, timeout, request_headers):
|
||||
proxy = None
|
||||
|
||||
def __init__(self, proxy_override=None):
|
||||
|
||||
# .strip('"') is going to save someone a lot of time when they accidently wrap the env value
|
||||
self.browser_type = os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').strip('"')
|
||||
self.command_executor = os.getenv(
|
||||
"PLAYWRIGHT_DRIVER_URL",
|
||||
'ws://playwright-chrome:3000'
|
||||
).strip('"')
|
||||
|
||||
# If any proxy settings are enabled, then we should setup the proxy object
|
||||
proxy_args = {}
|
||||
for k in self.playwright_proxy_settings_mappings:
|
||||
v = os.getenv('playwright_proxy_' + k, False)
|
||||
if v:
|
||||
proxy_args[k] = v.strip('"')
|
||||
|
||||
if proxy_args:
|
||||
self.proxy = proxy_args
|
||||
|
||||
# allow per-watch proxy selection override
|
||||
if proxy_override:
|
||||
self.proxy = {'server': proxy_override}
|
||||
|
||||
def run(self,
|
||||
url,
|
||||
timeout,
|
||||
request_headers,
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False):
|
||||
|
||||
from playwright.sync_api import sync_playwright
|
||||
import playwright._impl._api_types
|
||||
from playwright._impl._api_types import Error, TimeoutError
|
||||
|
||||
with sync_playwright() as p:
|
||||
browser_type = getattr(p, self.browser_type)
|
||||
|
||||
# Seemed to cause a connection Exception even tho I can see it connect
|
||||
# self.browser = browser_type.connect(self.command_executor, timeout=timeout*1000)
|
||||
browser = browser_type.connect_over_cdp(self.command_executor, timeout=timeout * 1000)
|
||||
|
||||
# Set user agent to prevent Cloudflare from blocking the browser
|
||||
# Use the default one configured in the App.py model that's passed from fetch_site_status.py
|
||||
context = browser.new_context(
|
||||
user_agent=request_headers['User-Agent'] if request_headers.get('User-Agent') else 'Mozilla/5.0',
|
||||
proxy=self.proxy
|
||||
)
|
||||
page = context.new_page()
|
||||
page.set_viewport_size({"width": 1280, "height": 1024})
|
||||
try:
|
||||
response = page.goto(url, timeout=timeout * 1000, wait_until='commit')
|
||||
# Wait_until = commit
|
||||
# - `'commit'` - consider operation to be finished when network response is received and the document started loading.
|
||||
# Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
|
||||
# This seemed to solve nearly all 'TimeoutErrors'
|
||||
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
|
||||
page.wait_for_timeout(extra_wait * 1000)
|
||||
except playwright._impl._api_types.TimeoutError as e:
|
||||
raise EmptyReply(url=url, status_code=None)
|
||||
|
||||
if response is None:
|
||||
raise EmptyReply(url=url, status_code=None)
|
||||
|
||||
if len(page.content().strip()) == 0:
|
||||
raise EmptyReply(url=url, status_code=None)
|
||||
|
||||
self.status_code = response.status
|
||||
self.content = page.content()
|
||||
self.headers = response.all_headers()
|
||||
|
||||
# Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it
|
||||
# JPEG is better here because the screenshots can be very very large
|
||||
page.screenshot(type='jpeg', clip={'x': 1.0, 'y': 1.0, 'width': 1280, 'height': 1024})
|
||||
self.screenshot = page.screenshot(type='jpeg', full_page=True, quality=90)
|
||||
context.close()
|
||||
browser.close()
|
||||
|
||||
|
||||
class base_html_webdriver(Fetcher):
|
||||
if os.getenv("WEBDRIVER_URL"):
|
||||
fetcher_description = "WebDriver Chrome/Javascript via '{}'".format(os.getenv("WEBDRIVER_URL"))
|
||||
else:
|
||||
fetcher_description = "WebDriver Chrome/Javascript"
|
||||
|
||||
command_executor = ''
|
||||
|
||||
# Configs for Proxy setup
|
||||
# In the ENV vars, is prefixed with "webdriver_", so it is for example "webdriver_sslProxy"
|
||||
selenium_proxy_settings_mappings = ['proxyType', 'ftpProxy', 'httpProxy', 'noProxy',
|
||||
'proxyAutoconfigUrl', 'sslProxy', 'autodetect',
|
||||
'socksProxy', 'socksVersion', 'socksUsername', 'socksPassword']
|
||||
proxy = None
|
||||
|
||||
def __init__(self, proxy_override=None):
|
||||
from selenium.webdriver.common.proxy import Proxy as SeleniumProxy
|
||||
|
||||
# .strip('"') is going to save someone a lot of time when they accidently wrap the env value
|
||||
self.command_executor = os.getenv("WEBDRIVER_URL", 'http://browser-chrome:4444/wd/hub').strip('"')
|
||||
|
||||
# If any proxy settings are enabled, then we should setup the proxy object
|
||||
proxy_args = {}
|
||||
for k in self.selenium_proxy_settings_mappings:
|
||||
v = os.getenv('webdriver_' + k, False)
|
||||
if v:
|
||||
proxy_args[k] = v.strip('"')
|
||||
|
||||
# Map back standard HTTP_ and HTTPS_PROXY to webDriver httpProxy/sslProxy
|
||||
if not proxy_args.get('webdriver_httpProxy') and self.system_http_proxy:
|
||||
proxy_args['httpProxy'] = self.system_http_proxy
|
||||
if not proxy_args.get('webdriver_sslProxy') and self.system_https_proxy:
|
||||
proxy_args['httpsProxy'] = self.system_https_proxy
|
||||
|
||||
# Allows override the proxy on a per-request basis
|
||||
if proxy_override is not None:
|
||||
proxy_args['httpProxy'] = proxy_override
|
||||
|
||||
if proxy_args:
|
||||
self.proxy = SeleniumProxy(raw=proxy_args)
|
||||
|
||||
def run(self,
|
||||
url,
|
||||
timeout,
|
||||
request_headers,
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False):
|
||||
|
||||
from selenium import webdriver
|
||||
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
|
||||
from selenium.common.exceptions import WebDriverException
|
||||
# request_body, request_method unused for now, until some magic in the future happens.
|
||||
|
||||
# check env for WEBDRIVER_URL
|
||||
driver = webdriver.Remote(
|
||||
self.driver = webdriver.Remote(
|
||||
command_executor=self.command_executor,
|
||||
desired_capabilities=DesiredCapabilities.CHROME)
|
||||
desired_capabilities=DesiredCapabilities.CHROME,
|
||||
proxy=self.proxy)
|
||||
|
||||
try:
|
||||
driver.get(url)
|
||||
self.driver.get(url)
|
||||
except WebDriverException as e:
|
||||
# Be sure we close the session window
|
||||
driver.quit()
|
||||
self.quit()
|
||||
raise
|
||||
|
||||
# @todo - how to check this? is it possible?
|
||||
self.status_code = 200
|
||||
# @todo somehow we should try to get this working for WebDriver
|
||||
# raise EmptyReply(url=url, status_code=r.status_code)
|
||||
|
||||
# @todo - dom wait loaded?
|
||||
time.sleep(5)
|
||||
self.content = driver.page_source
|
||||
|
||||
driver.quit()
|
||||
|
||||
time.sleep(int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay)
|
||||
self.content = self.driver.page_source
|
||||
self.headers = {}
|
||||
self.screenshot = self.driver.get_screenshot_as_png()
|
||||
self.quit()
|
||||
|
||||
# Does the connection to the webdriver work? run a test connection.
|
||||
def is_ready(self):
|
||||
from selenium import webdriver
|
||||
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
|
||||
from selenium.common.exceptions import WebDriverException
|
||||
|
||||
driver = webdriver.Remote(
|
||||
self.driver = webdriver.Remote(
|
||||
command_executor=self.command_executor,
|
||||
desired_capabilities=DesiredCapabilities.CHROME)
|
||||
|
||||
# driver.quit() seems to cause better exceptions
|
||||
driver.quit()
|
||||
|
||||
|
||||
self.quit()
|
||||
return True
|
||||
|
||||
def quit(self):
|
||||
if self.driver:
|
||||
try:
|
||||
self.driver.quit()
|
||||
except Exception as e:
|
||||
print("Exception in chrome shutdown/quit" + str(e))
|
||||
|
||||
|
||||
# "html_requests" is listed as the default fetcher in store.py!
|
||||
class html_requests(Fetcher):
|
||||
fetcher_description = "Basic fast Plaintext/HTTP Client"
|
||||
|
||||
def run(self, url, timeout, request_headers):
|
||||
import requests
|
||||
def __init__(self, proxy_override=None):
|
||||
self.proxy_override = proxy_override
|
||||
|
||||
r = requests.get(url,
|
||||
headers=request_headers,
|
||||
timeout=timeout,
|
||||
verify=False)
|
||||
def run(self,
|
||||
url,
|
||||
timeout,
|
||||
request_headers,
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False):
|
||||
|
||||
html = r.text
|
||||
proxies={}
|
||||
|
||||
# Allows override the proxy on a per-request basis
|
||||
if self.proxy_override:
|
||||
proxies = {'http': self.proxy_override, 'https': self.proxy_override, 'ftp': self.proxy_override}
|
||||
else:
|
||||
if self.system_http_proxy:
|
||||
proxies['http'] = self.system_http_proxy
|
||||
if self.system_https_proxy:
|
||||
proxies['https'] = self.system_https_proxy
|
||||
|
||||
r = requests.request(method=request_method,
|
||||
data=request_body,
|
||||
url=url,
|
||||
headers=request_headers,
|
||||
timeout=timeout,
|
||||
proxies=proxies,
|
||||
verify=False)
|
||||
|
||||
# If the response did not tell us what encoding format to expect, Then use chardet to override what `requests` thinks.
|
||||
# For example - some sites don't tell us it's utf-8, but return utf-8 content
|
||||
# This seems to not occur when using webdriver/selenium, it seems to detect the text encoding more reliably.
|
||||
# https://github.com/psf/requests/issues/1604 good info about requests encoding detection
|
||||
if not r.headers.get('content-type') or not 'charset=' in r.headers.get('content-type'):
|
||||
encoding = chardet.detect(r.content)['encoding']
|
||||
if encoding:
|
||||
r.encoding = encoding
|
||||
|
||||
# @todo test this
|
||||
if not r or not html or not len(html):
|
||||
raise EmptyReply(url)
|
||||
# @todo maybe you really want to test zero-byte return pages?
|
||||
if (not ignore_status_codes and not r) or not r.content or not len(r.content):
|
||||
raise EmptyReply(url=url, status_code=r.status_code)
|
||||
|
||||
self.status_code = r.status_code
|
||||
self.content = html
|
||||
self.content = r.text
|
||||
self.headers = r.headers
|
||||
|
||||
|
||||
# Decide which is the 'real' HTML webdriver, this is more a system wide config
|
||||
# rather than site-specific.
|
||||
use_playwright_as_chrome_fetcher = os.getenv('PLAYWRIGHT_DRIVER_URL', False)
|
||||
if use_playwright_as_chrome_fetcher:
|
||||
html_webdriver = base_html_playwright
|
||||
else:
|
||||
html_webdriver = base_html_webdriver
|
||||
|
||||
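A rough sketch of how a caller drives the updated fetcher interface above; the URL, header and timeout values are placeholders, and only the class names and run() signature shown in this diff are assumed:

    from changedetectionio import content_fetcher

    # html_requests is the plain HTTP client; proxy_override is the new per-watch override.
    fetcher = content_fetcher.html_requests(proxy_override=None)

    fetcher.run(url="https://example.com",            # placeholder URL
                timeout=15,
                request_headers={"User-Agent": "Mozilla/5.0"},
                request_body=None,
                request_method="GET",
                ignore_status_codes=False)

    print(fetcher.get_last_status_code())
    print(fetcher.content[:200])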
changedetectionio/diff.py (new file, 52 lines)
@@ -0,0 +1,52 @@
# used for the notifications, the front-end is using a JS library

import difflib


def same_slicer(l, a, b):
    if a == b:
        return [l[a]]
    else:
        return l[a:b]

# like .compare but a little different output
def customSequenceMatcher(before, after, include_equal=False):
    cruncher = difflib.SequenceMatcher(isjunk=lambda x: x in " \\t", a=before, b=after)

    # @todo Line-by-line mode instead of buncghed, including `after` that is not in `before` (maybe unset?)
    for tag, alo, ahi, blo, bhi in cruncher.get_opcodes():
        if include_equal and tag == 'equal':
            g = before[alo:ahi]
            yield g
        elif tag == 'delete':
            g = ["(removed) " + i for i in same_slicer(before, alo, ahi)]
            yield g
        elif tag == 'replace':
            g = ["(changed) " + i for i in same_slicer(before, alo, ahi)]
            g += ["(into ) " + i for i in same_slicer(after, blo, bhi)]
            yield g
        elif tag == 'insert':
            g = ["(added ) " + i for i in same_slicer(after, blo, bhi)]
            yield g

# only_differences - only return info about the differences, no context
# line_feed_sep could be "<br/>" or "<li>" or "\n" etc
def render_diff(previous_file, newest_file, include_equal=False, line_feed_sep="\n"):
    with open(newest_file, 'r') as f:
        newest_version_file_contents = f.read()
        newest_version_file_contents = [line.rstrip() for line in newest_version_file_contents.splitlines()]

    if previous_file:
        with open(previous_file, 'r') as f:
            previous_version_file_contents = f.read()
            previous_version_file_contents = [line.rstrip() for line in previous_version_file_contents.splitlines()]
    else:
        previous_version_file_contents = ""

    rendered_diff = customSequenceMatcher(previous_version_file_contents,
                                          newest_version_file_contents,
                                          include_equal)

    # Recursively join lists
    f = lambda L: line_feed_sep.join([f(x) if type(x) is list else x for x in L])
    return f(rendered_diff)
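A minimal usage sketch for the new diff helper; the snapshot contents and temp-file handling are invented for illustration:

    import tempfile
    from changedetectionio import diff

    with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as old_f, \
         tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as new_f:
        old_f.write("price: 10\ncolour: red\n")
        new_f.write("price: 12\ncolour: red\nstock: yes\n")

    # Each changed line comes back prefixed with (removed)/(changed)/(into)/(added).
    print(diff.render_diff(old_f.name, new_f.name, include_equal=False, line_feed_sep="\n"))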
@@ -1,10 +1,10 @@
|
||||
import time
|
||||
from changedetectionio import content_fetcher
|
||||
import hashlib
|
||||
from inscriptis import get_text
|
||||
import urllib3
|
||||
from . import html_tools
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import urllib3
|
||||
|
||||
from changedetectionio import content_fetcher, html_tools
|
||||
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
@@ -16,52 +16,51 @@ class perform_site_check():
|
||||
super().__init__(*args, **kwargs)
|
||||
self.datastore = datastore
|
||||
|
||||
def strip_ignore_text(self, content, list_ignore_text):
|
||||
import re
|
||||
ignore = []
|
||||
ignore_regex = []
|
||||
for k in list_ignore_text:
|
||||
# If there was a proxy list enabled, figure out what proxy_args/which proxy to use
|
||||
# if watch.proxy use that
|
||||
# fetcher.proxy_override = watch.proxy or main config proxy
|
||||
# Allows override the proxy on a per-request basis
|
||||
# ALWAYS use the first one is nothing selected
|
||||
|
||||
# Is it a regex?
|
||||
if k[0] == '/':
|
||||
ignore_regex.append(k.strip(" /"))
|
||||
else:
|
||||
ignore.append(k)
|
||||
def set_proxy_from_list(self, watch):
|
||||
proxy_args = None
|
||||
if self.datastore.proxy_list is None:
|
||||
return None
|
||||
|
||||
output = []
|
||||
for line in content.splitlines():
|
||||
# If its a valid one
|
||||
if any([watch['proxy'] in p for p in self.datastore.proxy_list]):
|
||||
proxy_args = watch['proxy']
|
||||
|
||||
# Always ignore blank lines in this mode. (when this function gets called)
|
||||
if len(line.strip()):
|
||||
regex_matches = False
|
||||
|
||||
# if any of these match, skip
|
||||
for regex in ignore_regex:
|
||||
try:
|
||||
if re.search(regex, line, re.IGNORECASE):
|
||||
regex_matches = True
|
||||
except Exception as e:
|
||||
continue
|
||||
|
||||
if not regex_matches and not any(skip_text in line for skip_text in ignore):
|
||||
output.append(line.encode('utf8'))
|
||||
|
||||
return "\n".encode('utf8').join(output)
|
||||
# not valid (including None), try the system one
|
||||
else:
|
||||
system_proxy = self.datastore.data['settings']['requests']['proxy']
|
||||
# Is not None and exists
|
||||
if any([system_proxy in p for p in self.datastore.proxy_list]):
|
||||
proxy_args = system_proxy
|
||||
|
||||
# Fallback - Did not resolve anything, use the first available
|
||||
if proxy_args is None:
|
||||
proxy_args = self.datastore.proxy_list[0][0]
|
||||
|
||||
return proxy_args
|
||||
|
||||
def run(self, uuid):
|
||||
timestamp = int(time.time()) # used for storage etc too
|
||||
|
||||
changed_detected = False
|
||||
screenshot = False # as bytes
|
||||
stripped_text_from_html = ""
|
||||
|
||||
watch = self.datastore.data['watching'][uuid]
|
||||
|
||||
update_obj = {'previous_md5': self.datastore.data['watching'][uuid]['previous_md5'],
|
||||
'history': {},
|
||||
"last_checked": timestamp
|
||||
}
|
||||
# Protect against file:// access
|
||||
if re.search(r'^file', watch['url'], re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False):
|
||||
raise Exception(
|
||||
"file:// type access is denied for security reasons."
|
||||
)
|
||||
|
||||
# Unset any existing notification error
|
||||
update_obj = {'last_notification_error': False, 'last_error': False}
|
||||
|
||||
extra_headers = self.datastore.get_val(uuid, 'headers')
|
||||
|
||||
@@ -75,104 +74,166 @@ class perform_site_check():
|
||||
if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
|
||||
request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')
|
||||
|
||||
# @todo check the failures are really handled how we expect
|
||||
timeout = self.datastore.data['settings']['requests']['timeout']
|
||||
url = self.datastore.get_val(uuid, 'url')
|
||||
request_body = self.datastore.get_val(uuid, 'body')
|
||||
request_method = self.datastore.get_val(uuid, 'method')
|
||||
ignore_status_code = self.datastore.get_val(uuid, 'ignore_status_codes')
|
||||
|
||||
# source: support
|
||||
is_source = False
|
||||
if url.startswith('source:'):
|
||||
url = url.replace('source:', '')
|
||||
is_source = True
|
||||
|
||||
# Pluggable content fetcher
|
||||
prefer_backend = watch['fetch_backend']
|
||||
if hasattr(content_fetcher, prefer_backend):
|
||||
klass = getattr(content_fetcher, prefer_backend)
|
||||
else:
|
||||
timeout = self.datastore.data['settings']['requests']['timeout']
|
||||
url = self.datastore.get_val(uuid, 'url')
|
||||
# If the klass doesnt exist, just use a default
|
||||
klass = getattr(content_fetcher, "html_requests")
|
||||
|
||||
# Pluggable content fetcher
|
||||
prefer_backend = watch['fetch_backend']
|
||||
if hasattr(content_fetcher, prefer_backend):
|
||||
klass = getattr(content_fetcher, prefer_backend)
|
||||
proxy_args = self.set_proxy_from_list(watch)
|
||||
fetcher = klass(proxy_override=proxy_args)
|
||||
|
||||
# Configurable per-watch or global extra delay before extracting text (for webDriver types)
|
||||
system_webdriver_delay = self.datastore.data['settings']['application'].get('webdriver_delay', None)
|
||||
if watch['webdriver_delay'] is not None:
|
||||
fetcher.render_extract_delay = watch['webdriver_delay']
|
||||
elif system_webdriver_delay is not None:
|
||||
fetcher.render_extract_delay = system_webdriver_delay
|
||||
|
||||
fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_code)
|
||||
|
||||
# Fetching complete, now filters
|
||||
# @todo move to class / maybe inside of fetcher abstract base?
|
||||
|
||||
# @note: I feel like the following should be in a more obvious chain system
|
||||
# - Check filter text
|
||||
# - Is the checksum different?
|
||||
# - Do we convert to JSON?
|
||||
# https://stackoverflow.com/questions/41817578/basic-method-chaining ?
|
||||
# return content().textfilter().jsonextract().checksumcompare() ?
|
||||
|
||||
is_json = 'application/json' in fetcher.headers.get('Content-Type', '')
|
||||
is_html = not is_json
|
||||
|
||||
# source: support, basically treat it as plaintext
|
||||
if is_source:
|
||||
is_html = False
|
||||
is_json = False
|
||||
|
||||
css_filter_rule = watch['css_filter']
|
||||
subtractive_selectors = watch.get(
|
||||
"subtractive_selectors", []
|
||||
) + self.datastore.data["settings"]["application"].get(
|
||||
"global_subtractive_selectors", []
|
||||
)
|
||||
|
||||
has_filter_rule = css_filter_rule and len(css_filter_rule.strip())
|
||||
has_subtractive_selectors = subtractive_selectors and len(subtractive_selectors[0].strip())
|
||||
|
||||
if is_json and not has_filter_rule:
|
||||
css_filter_rule = "json:$"
|
||||
has_filter_rule = True
|
||||
|
||||
if has_filter_rule:
|
||||
if 'json:' in css_filter_rule:
|
||||
stripped_text_from_html = html_tools.extract_json_as_string(content=fetcher.content, jsonpath_filter=css_filter_rule)
|
||||
is_html = False
|
||||
|
||||
if is_html or is_source:
|
||||
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
|
||||
html_content = fetcher.content
|
||||
|
||||
# If not JSON, and if it's not text/plain..
|
||||
if 'text/plain' in fetcher.headers.get('Content-Type', '').lower():
|
||||
# Don't run get_text or xpath/css filters on plaintext
|
||||
stripped_text_from_html = html_content
|
||||
else:
|
||||
# If the klass doesnt exist, just use a default
|
||||
klass = getattr(content_fetcher, "html_requests")
|
||||
# Then we assume HTML
|
||||
if has_filter_rule:
|
||||
# For HTML/XML we offer xpath as an option, just start a regular xPath "/.."
|
||||
if css_filter_rule[0] == '/' or css_filter_rule.startswith('xpath:'):
|
||||
html_content = html_tools.xpath_filter(xpath_filter=css_filter_rule.replace('xpath:', ''),
|
||||
html_content=fetcher.content)
|
||||
else:
|
||||
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
|
||||
html_content = html_tools.css_filter(css_filter=css_filter_rule, html_content=fetcher.content)
|
||||
|
||||
if has_subtractive_selectors:
|
||||
html_content = html_tools.element_removal(subtractive_selectors, html_content)
|
||||
|
||||
fetcher = klass()
|
||||
fetcher.run(url, timeout, request_headers)
|
||||
# Fetching complete, now filters
|
||||
# @todo move to class / maybe inside of fetcher abstract base?
|
||||
if not is_source:
|
||||
# extract text
|
||||
stripped_text_from_html = \
|
||||
html_tools.html_to_text(
|
||||
html_content,
|
||||
render_anchor_tag_content=self.datastore.data["settings"][
|
||||
"application"].get(
|
||||
"render_anchor_tag_content", False)
|
||||
)
|
||||
|
||||
# @note: I feel like the following should be in a more obvious chain system
|
||||
# - Check filter text
|
||||
# - Is the checksum different?
|
||||
# - Do we convert to JSON?
|
||||
# https://stackoverflow.com/questions/41817578/basic-method-chaining ?
|
||||
# return content().textfilter().jsonextract().checksumcompare() ?
|
||||
elif is_source:
|
||||
stripped_text_from_html = html_content
|
||||
|
||||
is_html = True
|
||||
css_filter_rule = watch['css_filter']
|
||||
if css_filter_rule and len(css_filter_rule.strip()):
|
||||
if 'json:' in css_filter_rule:
|
||||
stripped_text_from_html = html_tools.extract_json_as_string(content=fetcher.content, jsonpath_filter=css_filter_rule)
|
||||
is_html = False
|
||||
else:
|
||||
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
|
||||
stripped_text_from_html = html_tools.css_filter(css_filter=css_filter_rule, html_content=fetcher.content)
|
||||
# Re #340 - return the content before the 'ignore text' was applied
|
||||
text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')
|
||||
|
||||
if is_html:
|
||||
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
|
||||
html_content = fetcher.content
|
||||
if css_filter_rule and len(css_filter_rule.strip()):
|
||||
html_content = html_tools.css_filter(css_filter=css_filter_rule, html_content=fetcher.content)
|
||||
# Re #340 - return the content before the 'ignore text' was applied
|
||||
text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')
|
||||
|
||||
# get_text() via inscriptis
|
||||
stripped_text_from_html = get_text(html_content)
|
||||
# Treat pages with no renderable text content as a change? No by default
|
||||
empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)
|
||||
if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0:
|
||||
raise content_fetcher.ReplyWithContentButNoText(url=url, status_code=200)
|
||||
|
||||
# We rely on the actual text in the html output.. many sites have random script vars etc,
|
||||
# in the future we'll implement other mechanisms.
|
||||
# We rely on the actual text in the html output.. many sites have random script vars etc,
|
||||
# in the future we'll implement other mechanisms.
|
||||
|
||||
update_obj["last_check_status"] = fetcher.get_last_status_code()
|
||||
update_obj["last_error"] = False
|
||||
|
||||
|
||||
# If there's text to skip
|
||||
# @todo we could abstract out the get_text() to handle this cleaner
|
||||
if len(watch['ignore_text']):
|
||||
stripped_text_from_html = self.strip_ignore_text(stripped_text_from_html, watch['ignore_text'])
|
||||
else:
|
||||
stripped_text_from_html = stripped_text_from_html.encode('utf8')
|
||||
update_obj["last_check_status"] = fetcher.get_last_status_code()
|
||||
|
||||
# If there's text to skip
|
||||
# @todo we could abstract out the get_text() to handle this cleaner
|
||||
text_to_ignore = watch.get('ignore_text', []) + self.datastore.data['settings']['application'].get('global_ignore_text', [])
|
||||
if len(text_to_ignore):
|
||||
stripped_text_from_html = html_tools.strip_ignore_text(stripped_text_from_html, text_to_ignore)
|
||||
else:
|
||||
stripped_text_from_html = stripped_text_from_html.encode('utf8')
|
||||
|
||||
# Re #133 - if we should strip whitespaces from triggering the change detected comparison
|
||||
if self.datastore.data['settings']['application'].get('ignore_whitespace', False):
|
||||
fetched_md5 = hashlib.md5(stripped_text_from_html.translate(None, b'\r\n\t ')).hexdigest()
|
||||
else:
|
||||
fetched_md5 = hashlib.md5(stripped_text_from_html).hexdigest()
|
||||
|
||||
blocked_by_not_found_trigger_text = False
|
||||
# On the first run of a site, watch['previous_md5'] will be None, set it the current one.
|
||||
if not watch.get('previous_md5'):
|
||||
watch['previous_md5'] = fetched_md5
|
||||
update_obj["previous_md5"] = fetched_md5
|
||||
|
||||
if len(watch['trigger_text']):
|
||||
blocked_by_not_found_trigger_text = True
|
||||
for line in watch['trigger_text']:
|
||||
# Because JSON wont serialize a re.compile object
|
||||
if line[0] == '/' and line[-1] == '/':
|
||||
regex = re.compile(line.strip('/'), re.IGNORECASE)
|
||||
# Found it? so we don't wait for it anymore
|
||||
r = re.search(regex, str(stripped_text_from_html))
|
||||
if r:
|
||||
blocked_by_not_found_trigger_text = False
|
||||
break
|
||||
blocked_by_not_found_trigger_text = False
|
||||
|
||||
elif line.lower() in str(stripped_text_from_html).lower():
|
||||
# We found it don't wait for it.
|
||||
blocked_by_not_found_trigger_text = False
|
||||
break
|
||||
if len(watch['trigger_text']):
|
||||
# Yeah, lets block first until something matches
|
||||
blocked_by_not_found_trigger_text = True
|
||||
# Filter and trigger works the same, so reuse it
|
||||
result = html_tools.strip_ignore_text(content=str(stripped_text_from_html),
|
||||
wordlist=watch['trigger_text'],
|
||||
mode="line numbers")
|
||||
if result:
|
||||
blocked_by_not_found_trigger_text = False
|
||||
|
||||
if not blocked_by_not_found_trigger_text and watch['previous_md5'] != fetched_md5:
|
||||
changed_detected = True
|
||||
update_obj["previous_md5"] = fetched_md5
|
||||
update_obj["last_changed"] = timestamp
|
||||
|
||||
# could be None or False depending on JSON type
|
||||
# On the first run of a site, watch['previous_md5'] will be an empty string
|
||||
if not blocked_by_not_found_trigger_text and watch['previous_md5'] != fetched_md5:
|
||||
changed_detected = True
|
||||
|
||||
# Don't confuse people by updating as last-changed, when it actually just changed from None..
|
||||
if self.datastore.get_val(uuid, 'previous_md5'):
|
||||
update_obj["last_changed"] = timestamp
|
||||
|
||||
update_obj["previous_md5"] = fetched_md5
|
||||
|
||||
# Extract title as title
|
||||
if is_html and self.datastore.data['settings']['application']['extract_title_as_title']:
|
||||
# Extract title as title
|
||||
if is_html:
|
||||
if self.datastore.data['settings']['application']['extract_title_as_title'] or watch['extract_title_as_title']:
|
||||
if not watch['title'] or not len(watch['title']):
|
||||
update_obj['title'] = html_tools.extract_element(find='title', html_content=fetcher.content)
|
||||
|
||||
|
||||
return changed_detected, update_obj, stripped_text_from_html
|
||||
return changed_detected, update_obj, text_content_before_ignored_filter, fetcher.screenshot
|
||||
|
||||
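The change decision above ultimately reduces to an MD5 over the filtered text; a small sketch of that comparison, including the ignore_whitespace variant, with invented text and stored hash:

    import hashlib

    previous_md5 = "0" * 32                              # made-up previously stored hash
    stripped_text_from_html = b"Price: 10.99 EUR\nIn stock\n"
    ignore_whitespace = True

    if ignore_whitespace:
        # Same approach as above: drop \r \n \t and spaces before hashing so
        # whitespace-only edits are not reported as changes.
        fetched_md5 = hashlib.md5(stripped_text_from_html.translate(None, b'\r\n\t ')).hexdigest()
    else:
        fetched_md5 = hashlib.md5(stripped_text_from_html).hexdigest()

    changed_detected = previous_md5 != fetched_md5
    print(changed_detected, fetched_md5)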
@@ -1,39 +1,74 @@
|
||||
from wtforms import Form, SelectField, RadioField, BooleanField, StringField, PasswordField, validators, IntegerField, fields, TextAreaField, \
|
||||
Field
|
||||
from wtforms import widgets
|
||||
from wtforms.validators import ValidationError
|
||||
from wtforms.fields import html5
|
||||
from changedetectionio import content_fetcher
|
||||
import re
|
||||
|
||||
from wtforms import (
|
||||
BooleanField,
|
||||
Field,
|
||||
Form,
|
||||
IntegerField,
|
||||
PasswordField,
|
||||
RadioField,
|
||||
SelectField,
|
||||
StringField,
|
||||
SubmitField,
|
||||
TextAreaField,
|
||||
fields,
|
||||
validators,
|
||||
widgets,
|
||||
)
|
||||
from wtforms.validators import ValidationError
|
||||
|
||||
from changedetectionio import content_fetcher
|
||||
from changedetectionio.notification import (
|
||||
default_notification_body,
|
||||
default_notification_format,
|
||||
default_notification_title,
|
||||
valid_notification_formats,
|
||||
)
|
||||
|
||||
from wtforms.fields import FormField
|
||||
|
||||
valid_method = {
|
||||
'GET',
|
||||
'POST',
|
||||
'PUT',
|
||||
'PATCH',
|
||||
'DELETE',
|
||||
}
|
||||
|
||||
default_method = 'GET'
|
||||
|
||||
|
||||
class StringListField(StringField):
|
||||
widget = widgets.TextArea()
|
||||
|
||||
def _value(self):
|
||||
if self.data:
|
||||
return "\r\n".join(self.data)
|
||||
# ignore empty lines in the storage
|
||||
data = list(filter(lambda x: len(x.strip()), self.data))
|
||||
# Apply strip to each line
|
||||
data = list(map(lambda x: x.strip(), data))
|
||||
return "\r\n".join(data)
|
||||
else:
|
||||
return u''
|
||||
|
||||
# incoming
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist:
|
||||
# Remove empty strings
|
||||
cleaned = list(filter(None, valuelist[0].split("\n")))
|
||||
self.data = [x.strip() for x in cleaned]
|
||||
p = 1
|
||||
if valuelist and len(valuelist[0].strip()):
|
||||
# Remove empty strings, stripping and splitting \r\n, only \n etc.
|
||||
self.data = valuelist[0].splitlines()
|
||||
# Remove empty lines from the final data
|
||||
self.data = list(filter(lambda x: len(x.strip()), self.data))
|
||||
else:
|
||||
self.data = []
|
||||
|
||||
|
||||
|
||||
class SaltyPasswordField(StringField):
|
||||
widget = widgets.PasswordInput()
|
||||
encrypted_password = ""
|
||||
|
||||
def build_password(self, password):
|
||||
import hashlib
|
||||
import base64
|
||||
import hashlib
|
||||
import secrets
|
||||
|
||||
# Make a new salt on every new password and store it with the password
|
||||
@@ -54,6 +89,13 @@ class SaltyPasswordField(StringField):
|
||||
else:
|
||||
self.data = False
|
||||
|
||||
class TimeBetweenCheckForm(Form):
|
||||
weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
hours = IntegerField('Hours', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
minutes = IntegerField('Minutes', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
seconds = IntegerField('Seconds', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
# @todo add total seconds minimum validatior = minimum_seconds_recheck_time
|
||||
|
||||
# Separated by key:value
|
||||
class StringDictKeyValue(StringField):
|
||||
@@ -91,8 +133,8 @@ class ValidateContentFetcherIsReady(object):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
from changedetectionio import content_fetcher
|
||||
import urllib3.exceptions
|
||||
from changedetectionio import content_fetcher
|
||||
|
||||
# Better would be a radiohandler that keeps a reference to each class
|
||||
if field.data is not None:
|
||||
@@ -104,10 +146,12 @@ class ValidateContentFetcherIsReady(object):
|
||||
except urllib3.exceptions.MaxRetryError as e:
|
||||
driver_url = some_object.command_executor
|
||||
message = field.gettext('Content fetcher \'%s\' did not respond.' % (field.data))
|
||||
message += '<br/>'+field.gettext('Be sure that the selenium/webdriver runner is running and accessible via network from this container/host.')
|
||||
message += '<br/>' + field.gettext(
|
||||
'Be sure that the selenium/webdriver runner is running and accessible via network from this container/host.')
|
||||
message += '<br/>' + field.gettext('Did you follow the instructions in the wiki?')
|
||||
message += '<br/><br/>' + field.gettext('WebDriver Host: %s' % (driver_url))
|
||||
message += '<br/><a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">Go here for more information</a>'
|
||||
message += '<br/>'+field.gettext('Content fetcher did not respond properly, unable to use it.\n %s' % (str(e)))
|
||||
|
||||
raise ValidationError(message)
|
||||
|
||||
@@ -116,7 +160,70 @@ class ValidateContentFetcherIsReady(object):
|
||||
raise ValidationError(message % (field.data, e))
|
||||
|
||||
|
||||
class ValidateNotificationBodyAndTitleWhenURLisSet(object):
|
||||
"""
|
||||
Validates that they entered something in both notification title+body when the URL is set
|
||||
Due to https://github.com/dgtlmoon/changedetection.io/issues/360
|
||||
"""
|
||||
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
if len(field.data):
|
||||
if not len(form.notification_title.data) or not len(form.notification_body.data):
|
||||
message = field.gettext('Notification Body and Title is required when a Notification URL is used')
|
||||
raise ValidationError(message)
|
||||
|
||||
class ValidateAppRiseServers(object):
|
||||
"""
|
||||
Validates that each URL given is compatible with AppRise
|
||||
"""
|
||||
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
import apprise
|
||||
apobj = apprise.Apprise()
|
||||
|
||||
for server_url in field.data:
|
||||
if not apobj.add(server_url):
|
||||
message = field.gettext('\'%s\' is not a valid AppRise URL.' % (server_url))
|
||||
raise ValidationError(message)
|
||||
|
||||
class ValidateTokensList(object):
|
||||
"""
|
||||
Validates that a {token} is from a valid set
|
||||
"""
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
from changedetectionio import notification
|
||||
regex = re.compile('{.*?}')
|
||||
for p in re.findall(regex, field.data):
|
||||
if not p.strip('{}') in notification.valid_tokens:
|
||||
message = field.gettext('Token \'%s\' is not a valid token.')
|
||||
raise ValidationError(message % (p))
|
||||
|
||||
class validateURL(object):
|
||||
|
||||
"""
|
||||
Flask wtform validators wont work with basic auth
|
||||
"""
|
||||
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
import validators
|
||||
try:
|
||||
validators.url(field.data.strip())
|
||||
except validators.ValidationFailure:
|
||||
message = field.gettext('\'%s\' is not a valid URL.' % (field.data.strip()))
|
||||
raise ValidationError(message)
|
||||
|
||||
class ValidateListRegex(object):
|
||||
"""
|
||||
Validates that anything that looks like a regex passes as a regex
|
||||
@@ -136,64 +243,146 @@ class ValidateListRegex(object):
|
||||
message = field.gettext('RegEx \'%s\' is not a valid regular expression.')
|
||||
raise ValidationError(message % (line))
|
||||
|
||||
class ValidateCSSJSONInput(object):
|
||||
class ValidateCSSJSONXPATHInput(object):
|
||||
"""
|
||||
Filter validation
|
||||
@todo CSS validator ;)
|
||||
"""
|
||||
|
||||
def __init__(self, message=None):
|
||||
def __init__(self, message=None, allow_xpath=True, allow_json=True):
|
||||
self.message = message
|
||||
self.allow_xpath = allow_xpath
|
||||
self.allow_json = allow_json
|
||||
|
||||
def __call__(self, form, field):
|
||||
if 'json:' in field.data:
|
||||
from jsonpath_ng.exceptions import JsonPathParserError
|
||||
from jsonpath_ng import jsonpath, parse
|
||||
|
||||
input = field.data.replace('json:', '')
|
||||
if isinstance(field.data, str):
|
||||
data = [field.data]
|
||||
else:
|
||||
data = field.data
|
||||
|
||||
for line in data:
|
||||
# Nothing to see here
|
||||
if not len(line.strip()):
|
||||
return
|
||||
|
||||
# Does it look like XPath?
|
||||
if line.strip()[0] == '/':
|
||||
if not self.allow_xpath:
|
||||
raise ValidationError("XPath not permitted in this field!")
|
||||
from lxml import etree, html
|
||||
tree = html.fromstring("<html></html>")
|
||||
|
||||
try:
|
||||
tree.xpath(line.strip())
|
||||
except etree.XPathEvalError as e:
|
||||
message = field.gettext('\'%s\' is not a valid XPath expression. (%s)')
|
||||
raise ValidationError(message % (line, str(e)))
|
||||
except:
|
||||
raise ValidationError("A system-error occurred when validating your XPath expression")
|
||||
|
||||
if 'json:' in line:
|
||||
if not self.allow_json:
|
||||
raise ValidationError("JSONPath not permitted in this field!")
|
||||
|
||||
from jsonpath_ng.exceptions import (
|
||||
JsonPathLexerError,
|
||||
JsonPathParserError,
|
||||
)
|
||||
from jsonpath_ng.ext import parse
|
||||
|
||||
input = line.replace('json:', '')
|
||||
|
||||
try:
|
||||
parse(input)
|
||||
except (JsonPathParserError, JsonPathLexerError) as e:
|
||||
message = field.gettext('\'%s\' is not a valid JSONPath expression. (%s)')
|
||||
raise ValidationError(message % (input, str(e)))
|
||||
except:
|
||||
raise ValidationError("A system-error occurred when validating your JSONPath expression")
|
||||
|
||||
# Re #265 - maybe in the future fetch the page and offer a
|
||||
# warning/notice that its possible the rule doesnt yet match anything?
|
||||
|
||||
try:
|
||||
parse(input)
|
||||
except JsonPathParserError as e:
|
||||
message = field.gettext('\'%s\' is not a valid JSONPath expression. (%s)')
|
||||
raise ValidationError(message % (input, str(e)))
|
||||
|
||||
class quickWatchForm(Form):
|
||||
# https://wtforms.readthedocs.io/en/2.3.x/fields/#module-wtforms.fields.html5
|
||||
# `require_tld` = False is needed even for the test harness "http://localhost:5005.." to run
|
||||
|
||||
url = html5.URLField('URL', [validators.URL(require_tld=False)])
|
||||
url = fields.URLField('URL', validators=[validateURL()])
|
||||
tag = StringField('Group tag', [validators.Optional(), validators.Length(max=35)])
|
||||
|
||||
class watchForm(quickWatchForm):
|
||||
# Common to a single watch and the global settings
|
||||
class commonSettingsForm(Form):
|
||||
|
||||
minutes_between_check = html5.IntegerField('Maximum time in minutes until recheck',
|
||||
[validators.Optional(), validators.NumberRange(min=1)])
|
||||
css_filter = StringField('CSS/JSON Filter', [ValidateCSSJSONInput()])
|
||||
title = StringField('Title')
|
||||
notification_urls = StringListField('Notification URL list', validators=[validators.Optional(), ValidateNotificationBodyAndTitleWhenURLisSet(), ValidateAppRiseServers()])
|
||||
notification_title = StringField('Notification title', default=default_notification_title, validators=[validators.Optional(), ValidateTokensList()])
|
||||
notification_body = TextAreaField('Notification body', default=default_notification_body, validators=[validators.Optional(), ValidateTokensList()])
|
||||
notification_format = SelectField('Notification format', choices=valid_notification_formats.keys(), default=default_notification_format)
|
||||
fetch_backend = RadioField(u'Fetch method', choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
|
||||
extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False)
|
||||
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, message="Should contain one or more seconds")] )
|
||||
|
||||
fetch_backend = RadioField(u'Fetch Method', choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
|
||||
class watchForm(commonSettingsForm):
|
||||
|
||||
ignore_text = StringListField('Ignore Text', [ValidateListRegex()])
|
||||
notification_urls = StringListField('Notification URL List')
|
||||
headers = StringDictKeyValue('Request Headers')
|
||||
trigger_check = BooleanField('Send test notification on save')
|
||||
url = fields.URLField('URL', validators=[validateURL()])
|
||||
tag = StringField('Group tag', [validators.Optional(), validators.Length(max=35)], default='')
|
||||
|
||||
time_between_check = FormField(TimeBetweenCheckForm)
|
||||
|
||||
css_filter = StringField('CSS/JSON/XPATH Filter', [ValidateCSSJSONXPATHInput()], default='')
|
||||
|
||||
subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)])
|
||||
title = StringField('Title', default='')
|
||||
|
||||
ignore_text = StringListField('Ignore text', [ValidateListRegex()])
|
||||
headers = StringDictKeyValue('Request headers')
|
||||
body = TextAreaField('Request body', [validators.Optional()])
|
||||
method = SelectField('Request method', choices=valid_method, default=default_method)
|
||||
ignore_status_codes = BooleanField('Ignore status codes (process non-2xx status codes as normal)', default=False)
|
||||
trigger_text = StringListField('Trigger/wait for text', [validators.Optional(), ValidateListRegex()])
|
||||
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
|
||||
save_and_preview_button = SubmitField('Save & Preview', render_kw={"class": "pure-button pure-button-primary"})
|
||||
proxy = RadioField('Proxy')
|
||||
|
||||
def validate(self, **kwargs):
|
||||
if not super().validate():
|
||||
return False
|
||||
|
||||
result = True
|
||||
|
||||
# Fail form validation when a body is set for a GET
|
||||
if self.method.data == 'GET' and self.body.data:
|
||||
self.body.errors.append('Body must be empty when Request Method is set to GET')
|
||||
result = False
|
||||
|
||||
return result
|
||||
|
||||
|
||||
# datastore.data['settings']['requests']..
|
||||
class globalSettingsRequestForm(Form):
|
||||
time_between_check = FormField(TimeBetweenCheckForm)
|
||||
proxy = RadioField('Proxy')
|
||||
|
||||
|
||||
# datastore.data['settings']['application']..
|
||||
class globalSettingsApplicationForm(commonSettingsForm):
|
||||
|
||||
base_url = StringField('Base URL', validators=[validators.Optional()])
|
||||
global_subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)])
|
||||
global_ignore_text = StringListField('Ignore Text', [ValidateListRegex()])
|
||||
ignore_whitespace = BooleanField('Ignore whitespace')
|
||||
real_browser_save_screenshot = BooleanField('Save last screenshot when using Chrome?')
|
||||
removepassword_button = SubmitField('Remove password', render_kw={"class": "pure-button pure-button-primary"})
|
||||
empty_pages_are_a_change = BooleanField('Treat empty pages as a change?', default=False)
|
||||
render_anchor_tag_content = BooleanField('Render anchor tag content', default=False)
|
||||
fetch_backend = RadioField('Fetch Method', default="html_requests", choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
|
||||
api_access_token_enabled = BooleanField('API access token security check enabled', default=True, validators=[validators.Optional()])
|
||||
password = SaltyPasswordField()
|
||||
|
||||
|
||||
class globalSettingsForm(Form):
|
||||
# Define these as FormFields/"sub forms", this way it matches the JSON storage
|
||||
# datastore.data['settings']['application']..
|
||||
# datastore.data['settings']['requests']..
|
||||
|
||||
password = SaltyPasswordField()
|
||||
|
||||
minutes_between_check = html5.IntegerField('Maximum time in minutes until recheck',
|
||||
[validators.NumberRange(min=1)])
|
||||
|
||||
notification_urls = StringListField('Notification URL List')
|
||||
|
||||
fetch_backend = RadioField(u'Fetch Method', choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
|
||||
|
||||
extract_title_as_title = BooleanField('Extract <title> from document and use as watch title')
|
||||
trigger_check = BooleanField('Send test notification on save')
|
||||
|
||||
notification_title = StringField('Notification Title')
|
||||
notification_body = TextAreaField('Notification Body')
|
||||
requests = FormField(globalSettingsRequestForm)
|
||||
application = FormField(globalSettingsApplicationForm)
|
||||
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
|
||||
|
||||
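The filter validator above accepts either an XPath (leading '/') or a 'json:'-prefixed JSONPath; a standalone sketch of those two syntax checks, assuming lxml and jsonpath-ng are installed as the code above already expects:

    from lxml import etree, html
    from jsonpath_ng.ext import parse
    from jsonpath_ng.exceptions import JsonPathLexerError, JsonPathParserError

    def filter_syntax_ok(line):
        line = line.strip()
        if line.startswith('/'):
            # XPath: evaluate against an empty document purely as a syntax check.
            try:
                html.fromstring("<html></html>").xpath(line)
            except etree.XPathEvalError:
                return False
        elif line.startswith('json:'):
            try:
                parse(line.replace('json:', ''))
            except (JsonPathParserError, JsonPathLexerError):
                return False
        return True

    print(filter_syntax_ok('//div[@class="price"]'))    # True
    print(filter_syntax_ok('json:$.offers[0].price'))   # True
    print(filter_syntax_ok('//div[@class='))            # False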
@@ -1,6 +1,12 @@
|
||||
import json
|
||||
import re
|
||||
from typing import List
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from jsonpath_ng import parse
|
||||
from jsonpath_ng.ext import parse
|
||||
import re
|
||||
from inscriptis import get_text
|
||||
from inscriptis.model.config import ParserConfig
|
||||
|
||||
|
||||
class JSONNotFound(ValueError):
|
||||
@@ -16,6 +22,31 @@ def css_filter(css_filter, html_content):
|
||||
|
||||
return html_block + "\n"
|
||||
|
||||
def subtractive_css_selector(css_selector, html_content):
|
||||
soup = BeautifulSoup(html_content, "html.parser")
|
||||
for item in soup.select(css_selector):
|
||||
item.decompose()
|
||||
return str(soup)
|
||||
|
||||
|
||||
def element_removal(selectors: List[str], html_content):
|
||||
"""Joins individual filters into one css filter."""
|
||||
selector = ",".join(selectors)
|
||||
return subtractive_css_selector(selector, html_content)
|
||||
|
||||
|
||||
# Return str Utf-8 of matched rules
|
||||
def xpath_filter(xpath_filter, html_content):
|
||||
from lxml import etree, html
|
||||
|
||||
tree = html.fromstring(html_content)
|
||||
html_block = ""
|
||||
|
||||
for item in tree.xpath(xpath_filter.strip(), namespaces={'re':'http://exslt.org/regular-expressions'}):
|
||||
html_block+= etree.tostring(item, pretty_print=True).decode('utf-8')+"<br/>"
|
||||
|
||||
return html_block
|
||||
|
||||
|
||||
# Extract/find element
|
||||
def extract_element(find='title', html_content=''):
|
||||
@@ -45,10 +76,13 @@ def _parse_json(json_data, jsonpath_filter):
|
||||
if len(match) == 1:
|
||||
s = match[0].value
|
||||
|
||||
if not s:
|
||||
raise JSONNotFound("No Matching JSON could be found for the rule {}".format(jsonpath_filter.replace('json:', '')))
|
||||
# Re #257 - Better handling where it does not exist, in the case the original 's' value was False..
|
||||
if not match:
|
||||
# Re 265 - Just return an empty string when filter not found
|
||||
return ''
|
||||
|
||||
stripped_text_from_html = json.dumps(s, indent=4)
|
||||
# Ticket #462 - allow the original encoding through, usually it's UTF-8 or similar
|
||||
stripped_text_from_html = json.dumps(s, indent=4, ensure_ascii=False)
|
||||
|
||||
return stripped_text_from_html
|
||||
|
||||
@@ -85,6 +119,86 @@ def extract_json_as_string(content, jsonpath_filter):
            break

    if not stripped_text_from_html:
        raise JSONNotFound("No JSON matching the rule '%s' found" % jsonpath_filter.replace('json:',''))
        # Re 265 - Just return an empty string when filter not found
        return ''

    return stripped_text_from_html

# Mode - "content" return the content without the matches (default)
#      - "line numbers" return a list of line numbers that match (int list)
#
# wordlist - list of regex's (str) or words (str)
def strip_ignore_text(content, wordlist, mode="content"):
    ignore = []
    ignore_regex = []

    # @todo check this runs case insensitive
    for k in wordlist:

        # Is it a regex?
        if k[0] == '/':
            ignore_regex.append(k.strip(" /"))
        else:
            ignore.append(k)

    i = 0
    output = []
    ignored_line_numbers = []
    for line in content.splitlines():
        i += 1
        # Always ignore blank lines in this mode. (when this function gets called)
        if len(line.strip()):
            regex_matches = False

            # if any of these match, skip
            for regex in ignore_regex:
                try:
                    if re.search(regex, line, re.IGNORECASE):
                        regex_matches = True
                except Exception as e:
                    continue

            if not regex_matches and not any(skip_text.lower() in line.lower() for skip_text in ignore):
                output.append(line.encode('utf8'))
            else:
                ignored_line_numbers.append(i)

    # Used for finding out what to highlight
    if mode == "line numbers":
        return ignored_line_numbers

    return "\n".encode('utf8').join(output)


def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:
    """Converts html string to a string with just the text. If ignoring
    rendering anchor tag content is enable, anchor tag content are also
    included in the text

    :param html_content: string with html content
    :param render_anchor_tag_content: boolean flag indicating whether to extract
    hyperlinks (the anchor tag content) together with text. This refers to the
    'href' inside 'a' tags.
    Anchor tag content is rendered in the following manner:
    '[ text ](anchor tag content)'
    :return: extracted text from the HTML
    """
    # if anchor tag content flag is set to True define a config for
    # extracting this content
    if render_anchor_tag_content:

        parser_config = ParserConfig(
            annotation_rules={"a": ["hyperlink"]}, display_links=True
        )

    # otherwise set config to None
    else:
        parser_config = None

    # get text and annotations via inscriptis
    text_content = get_text(html_content, config=parser_config)

    return text_content
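A short sketch of how the helpers above compose on a fragment of HTML; the markup and selector are invented:

    from changedetectionio import html_tools

    markup = """<html><body>
    <div class="ad">Buy now!</div>
    <p>Price is <a href="/offer">10.99 EUR</a></p>
    </body></html>"""

    # Remove unwanted blocks first, then flatten what is left to comparable text.
    cleaned = html_tools.element_removal(['.ad'], markup)
    text = html_tools.html_to_text(cleaned, render_anchor_tag_content=True)
    print(text)
    # With render_anchor_tag_content=True the link target is kept, roughly as
    # "Price is [10.99 EUR](/offer)"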
changedetectionio/importer.py (new file, 133 lines)
@@ -0,0 +1,133 @@
from abc import ABC, abstractmethod
import time
import validators


class Importer():
    remaining_data = []
    new_uuids = []
    good = 0

    def __init__(self):
        self.new_uuids = []
        self.good = 0
        self.remaining_data = []

    @abstractmethod
    def run(self,
            data,
            flash,
            datastore):
        pass


class import_url_list(Importer):
    """
    Imports a list, can be in <code>https://example.com tag1, tag2, last tag</code> format
    """
    def run(self,
            data,
            flash,
            datastore,
            ):

        urls = data.split("\n")
        good = 0
        now = time.time()

        if (len(urls) > 5000):
            flash("Importing 5,000 of the first URLs from your list, the rest can be imported again.")

        for url in urls:
            url = url.strip()
            if not len(url):
                continue

            tags = ""

            # 'tags' should be a csv list after the URL
            if ' ' in url:
                url, tags = url.split(" ", 1)

            # Flask wtform validators won't work with basic auth, use the validators package
            # Up to 5000 per batch so we don't flood the server
            if len(url) and validators.url(url.replace('source:', '')) and good < 5000:
                new_uuid = datastore.add_watch(url=url.strip(), tag=tags, write_to_disk_now=False)
                if new_uuid:
                    # Straight into the queue.
                    self.new_uuids.append(new_uuid)
                    good += 1
                continue

            # Worked past the 'continue' above, append it to the bad list
            if self.remaining_data is None:
                self.remaining_data = []
            self.remaining_data.append(url)

        flash("{} Imported from list in {:.2f}s, {} Skipped.".format(good, time.time() - now, len(self.remaining_data)))

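A sketch of the plain-text payload this importer expects: one watch per line, with optional comma-separated tags after the first space. The URLs, the datastore object and the use of print as the flash callback are illustrative stand-ins, not from the repository:

    payload = "\n".join([
        "https://example.com/pricing",            # no tags
        "https://example.org/news tech, daily",   # URL followed by tags
        "not-a-valid-url",                        # fails validation, ends up in remaining_data
    ])

    importer = import_url_list()
    importer.run(data=payload, flash=print, datastore=some_datastore)  # some_datastore is assumed
    print(importer.new_uuids)       # UUIDs of the watches that were added
    print(importer.remaining_data)  # lines that failed validation
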
class import_distill_io_json(Importer):
    def run(self,
            data,
            flash,
            datastore,
            ):

        import json
        good = 0
        now = time.time()
        self.new_uuids = []

        try:
            data = json.loads(data.strip())
        except json.decoder.JSONDecodeError:
            flash("Unable to read JSON file, was it broken?", 'error')
            return

        if not data.get('data'):
            flash("JSON structure looks invalid, was it broken?", 'error')
            return

        for d in data.get('data'):
            d_config = json.loads(d['config'])
            extras = {'title': d['name']}

            if len(d['uri']) and good < 5000:
                try:
                    # @todo we only support CSS ones at the moment
                    if d_config['selections'][0]['frames'][0]['excludes'][0]['type'] == 'css':
                        extras['subtractive_selectors'] = d_config['selections'][0]['frames'][0]['excludes'][0]['expr']
                except KeyError:
                    pass
                except IndexError:
                    pass

                try:
                    extras['css_filter'] = d_config['selections'][0]['frames'][0]['includes'][0]['expr']
                    if d_config['selections'][0]['frames'][0]['includes'][0]['type'] == 'xpath':
                        extras['css_filter'] = 'xpath:' + extras['css_filter']

                except KeyError:
                    pass
                except IndexError:
                    pass

                try:
                    extras['tag'] = ", ".join(d['tags'])
                except KeyError:
                    pass
                except IndexError:
                    pass

                new_uuid = datastore.add_watch(url=d['uri'].strip(),
                                               extras=extras,
                                               write_to_disk_now=False)

                if new_uuid:
                    # Straight into the queue.
                    self.new_uuids.append(new_uuid)
                    good += 1

        flash("{} Imported from Distill.io in {:.2f}s, {} Skipped.".format(len(self.new_uuids), time.time() - now, len(self.remaining_data)))

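Based only on the fields the importer reads above (data, name, uri, tags, and a per-item config stored as a JSON string), an export roughly shaped like this would be accepted. The values and the datastore object are illustrative, not a real Distill.io export:

    import json

    distill_item_config = {
        "selections": [
            {"frames": [
                {"includes": [{"type": "css", "expr": ".price"}],
                 "excludes": []}
            ]}
        ]
    }
    example_export = json.dumps({
        "data": [{
            "name": "Example product page",
            "uri": "https://example.com/product",
            "tags": ["shopping"],
            # the per-watch config is itself a JSON string inside the JSON
            "config": json.dumps(distill_item_config),
        }]
    })

    importer = import_distill_io_json()
    importer.run(data=example_export, flash=print, datastore=some_datastore)  # some_datastore is a stand-in
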
changedetectionio/model/App.py (new file, 54 lines)
@@ -0,0 +1,54 @@
import collections
import os

import uuid as uuid_builder

from changedetectionio.notification import (
    default_notification_body,
    default_notification_format,
    default_notification_title,
)

class model(dict):
    base_config = {
        'note': "Hello! If you change this file manually, please be sure to restart your changedetection.io instance!",
        'watching': {},
        'settings': {
            'headers': {
                'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36',
                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
                'Accept-Encoding': 'gzip, deflate',  # No support for brotli in python requests yet.
                'Accept-Language': 'en-GB,en-US;q=0.9,en;'
            },
            'requests': {
                'timeout': 15,  # Default 15 seconds
                'time_between_check': {'weeks': None, 'days': None, 'hours': 3, 'minutes': None, 'seconds': None},
                'workers': 10,  # Number of threads, lower is better for slow connections
                'proxy': None  # Preferred proxy connection
            },
            'application': {
                'api_access_token_enabled': True,
                'password': False,
                'base_url': None,
                'extract_title_as_title': False,
                'empty_pages_are_a_change': False,
                'fetch_backend': os.getenv("DEFAULT_FETCH_BACKEND", "html_requests"),
                'global_ignore_text': [],  # List of text to ignore when calculating the comparison checksum
                'global_subtractive_selectors': [],
                'ignore_whitespace': False,
                'render_anchor_tag_content': False,
                'notification_urls': [],  # Apprise URL list
                # Custom notification content
                'notification_title': default_notification_title,
                'notification_body': default_notification_body,
                'notification_format': default_notification_format,
                'real_browser_save_screenshot': True,
                'schema_version': 0,
                'webdriver_delay': None  # Extra delay in seconds before extracting text
            }
        }
    }

    def __init__(self, *arg, **kw):
        super(model, self).__init__(*arg, **kw)
        self.update(self.base_config)
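A minimal sketch of how this settings model behaves: it is just a dict pre-populated with base_config, so defaults can be read and overridden with normal dict access. The override shown is illustrative only:

    settings = model()

    # Defaults are available immediately
    print(settings['settings']['requests']['timeout'])           # 15
    print(settings['settings']['application']['fetch_backend'])  # e.g. "html_requests" unless DEFAULT_FETCH_BACKEND is set

    # Overriding a default is plain dict mutation (illustrative)
    settings['settings']['requests']['timeout'] = 30
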
changedetectionio/model/Watch.py (new file, 70 lines)
@@ -0,0 +1,70 @@
import os

import uuid as uuid_builder

minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))

from changedetectionio.notification import (
    default_notification_body,
    default_notification_format,
    default_notification_title,
)


class model(dict):
    base_config = {
        'url': None,
        'tag': None,
        'last_checked': 0,
        'last_changed': 0,
        'paused': False,
        'last_viewed': 0,  # history key value of the last viewed via the [diff] link
        'newest_history_key': 0,
        'title': None,
        'previous_md5': False,
        # UUID not needed, should be generated only as a key
        # 'uuid':
        'headers': {},  # Extra headers to send
        'body': None,
        'method': 'GET',
        'history': {},  # Dict of timestamp and output stripped filename
        'ignore_text': [],  # List of text to ignore when calculating the comparison checksum
        # Custom notification content
        'notification_urls': [],  # List of URLs to add to the notification Queue (Usually AppRise)
        'notification_title': default_notification_title,
        'notification_body': default_notification_body,
        'notification_format': default_notification_format,
        'css_filter': "",
        'subtractive_selectors': [],
        'trigger_text': [],  # List of text or regex to wait for until a change is detected
        'fetch_backend': None,
        'extract_title_as_title': False,
        'proxy': None,  # Preferred proxy connection
        # Re #110, so then if this is set to None, we know to use the default value instead
        # Requires setting to None on submit if it's the same as the default
        # Should be all None by default, so we use the system default in this case.
        'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
        'webdriver_delay': None
    }

    def __init__(self, *arg, **kw):
        self.update(self.base_config)
        # goes at the end so we update the default object with the initialiser
        super(model, self).__init__(*arg, **kw)


    @property
    def has_empty_checktime(self):
        # True when every value in 'time_between_check' is None, False or 0
        res = all(x == None or x == False or x == 0 for x in self.get('time_between_check', {}).values())
        return res

    def threshold_seconds(self):
        seconds = 0
        mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
        for m, n in mtable.items():
            x = self.get('time_between_check', {}).get(m, None)
            if x:
                seconds += x * n
        return seconds
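A quick sketch of how the recheck interval falls out of time_between_check; the interval values below are illustrative:

    watch = model()

    # All interval fields default to None, so the system-wide default applies
    print(watch.has_empty_checktime)   # True
    print(watch.threshold_seconds())   # 0

    # e.g. 1 hour + 30 minutes -> 3600 + 30 * 60 = 5400 seconds
    watch['time_between_check'] = {'weeks': None, 'days': None, 'hours': 1, 'minutes': 30, 'seconds': None}
    print(watch.has_empty_checktime)   # False
    print(watch.threshold_seconds())   # 5400
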
changedetectionio/model/__init__.py (new empty file)
@@ -1,42 +1,119 @@
|
||||
import os
|
||||
import apprise
|
||||
from apprise import NotifyFormat
|
||||
|
||||
valid_tokens = {
|
||||
'base_url': '',
|
||||
'watch_url': '',
|
||||
'watch_uuid': '',
|
||||
'watch_title': '',
|
||||
'watch_tag': '',
|
||||
'diff': '',
|
||||
'diff_full': '',
|
||||
'diff_url': '',
|
||||
'preview_url': '',
|
||||
'current_snapshot': ''
|
||||
}
|
||||
|
||||
valid_notification_formats = {
|
||||
'Text': NotifyFormat.TEXT,
|
||||
'Markdown': NotifyFormat.MARKDOWN,
|
||||
'HTML': NotifyFormat.HTML,
|
||||
}
|
||||
|
||||
default_notification_format = 'Text'
|
||||
default_notification_body = '{watch_url} had a change.\n---\n{diff}\n---\n'
|
||||
default_notification_title = 'ChangeDetection.io Notification - {watch_url}'
|
||||
|
||||
def process_notification(n_object, datastore):
|
||||
apobj = apprise.Apprise()
|
||||
for url in n_object['notification_urls']:
|
||||
print (">> Process Notification: AppRise notifying {}".format(url.strip()))
|
||||
apobj.add(url.strip())
|
||||
|
||||
# Get the notification body from datastore
|
||||
n_body = datastore.data['settings']['application']['notification_body']
|
||||
# Get the notification title from the datastore
|
||||
n_title = datastore.data['settings']['application']['notification_title']
|
||||
n_body = n_object.get('notification_body', default_notification_body)
|
||||
n_title = n_object.get('notification_title', default_notification_title)
|
||||
n_format = valid_notification_formats.get(
|
||||
n_object['notification_format'],
|
||||
valid_notification_formats[default_notification_format],
|
||||
)
|
||||
|
||||
|
||||
# Insert variables into the notification content
|
||||
notification_parameters = create_notification_parameters(n_object)
|
||||
raw_notification_text = [n_body, n_title]
|
||||
notification_parameters = create_notification_parameters(n_object, datastore)
|
||||
|
||||
parameterised_notification_text = dict(
|
||||
[
|
||||
(i, n.replace(n, n.format(**notification_parameters)))
|
||||
for i, n in zip(['body', 'title'], raw_notification_text)
|
||||
]
|
||||
)
|
||||
for n_k in notification_parameters:
|
||||
token = '{' + n_k + '}'
|
||||
val = notification_parameters[n_k]
|
||||
n_title = n_title.replace(token, val)
|
||||
n_body = n_body.replace(token, val)
|
||||
|
||||
apobj.notify(
|
||||
body=parameterised_notification_text["body"],
|
||||
title=parameterised_notification_text["title"]
|
||||
)
|
||||
# https://github.com/caronc/apprise/wiki/Development_LogCapture
|
||||
# Anything higher than or equal to WARNING (which covers things like Connection errors)
|
||||
# raise it as an exception
|
||||
apobjs=[]
|
||||
for url in n_object['notification_urls']:
|
||||
|
||||
apobj = apprise.Apprise(debug=True)
|
||||
url = url.strip()
|
||||
if len(url):
|
||||
print(">> Process Notification: AppRise notifying {}".format(url))
|
||||
with apprise.LogCapture(level=apprise.logging.DEBUG) as logs:
|
||||
# Re 323 - Limit discord length to their 2000 char limit total or it won't send.
|
||||
# Because different notifications may require different pre-processing, run each sequentially :(
|
||||
# 2000 bytes minus -
|
||||
# 200 bytes for the overhead of the _entire_ json payload, 200 bytes for {tts, wait, content} etc headers
|
||||
# Length of URL - In case they specify a longer custom avatar_url
|
||||
|
||||
# So if no avatar_url is specified, add one so it can be correctly calculated into the total payload
|
||||
k = '?' if not '?' in url else '&'
|
||||
if not 'avatar_url' in url:
|
||||
url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png'
|
||||
|
||||
if url.startswith('tgram://'):
|
||||
# real limit is 4096, but minus some for extra metadata
|
||||
payload_max_size = 3600
|
||||
body_limit = max(0, payload_max_size - len(n_title))
|
||||
n_title = n_title[0:payload_max_size]
|
||||
n_body = n_body[0:body_limit]
|
||||
|
||||
elif url.startswith('discord://'):
|
||||
# real limit is 2000, but minus some for extra metadata
|
||||
payload_max_size = 1700
|
||||
body_limit = max(0, payload_max_size - len(n_title))
|
||||
n_title = n_title[0:payload_max_size]
|
||||
n_body = n_body[0:body_limit]
|
||||
|
||||
apobj.add(url)
|
||||
|
||||
apobj.notify(
|
||||
title=n_title,
|
||||
body=n_body,
|
||||
body_format=n_format)
|
||||
|
||||
apobj.clear()
|
||||
|
||||
# In case it needs to exist in memory for a while afterwards to process(?)
|
||||
apobjs.append(apobj)
|
||||
|
||||
# Returns empty string if nothing found, multi-line string otherwise
|
||||
log_value = logs.getvalue()
|
||||
if log_value and 'WARNING' in log_value or 'ERROR' in log_value:
|
||||
raise Exception(log_value)
|
||||
|
||||
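The per-service truncation above is simple arithmetic: reserve room for the title, then clip the body to whatever remains of the payload budget. A small illustrative sketch; the 1700 and 3600 budgets mirror the values used above and the sample strings are made up:

    def clip_for_service(title, body, payload_max_size):
        # Keep the title, then give the body whatever budget is left over
        body_limit = max(0, payload_max_size - len(title))
        return title[0:payload_max_size], body[0:body_limit]

    title = "ChangeDetection.io Notification - https://example.com"
    body = "x" * 5000  # an oversized diff, purely illustrative

    t, b = clip_for_service(title, body, 1700)   # discord:// budget
    t, b = clip_for_service(title, body, 3600)   # tgram:// budget
    # in both cases the title is kept whole and the body is clipped to the remaining budget
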
# Notification title + body content parameters get created here.
|
||||
def create_notification_parameters(n_object):
|
||||
def create_notification_parameters(n_object, datastore):
|
||||
from copy import deepcopy
|
||||
|
||||
# in the case we send a test notification from the main settings, there is no UUID.
|
||||
uuid = n_object['uuid'] if 'uuid' in n_object else ''
|
||||
|
||||
if uuid != '':
|
||||
watch_title = datastore.data['watching'][uuid]['title']
|
||||
watch_tag = datastore.data['watching'][uuid]['tag']
|
||||
else:
|
||||
watch_title = 'Change Detection'
|
||||
watch_tag = ''
|
||||
|
||||
# Create URLs to customise the notification with
|
||||
base_url = os.getenv('BASE_URL', '').strip('"')
|
||||
base_url = datastore.data['settings']['application']['base_url']
|
||||
|
||||
watch_url = n_object['watch_url']
|
||||
|
||||
# Re #148 - Some people have just {base_url} in the body or title, but this may break some notification services
|
||||
@@ -47,11 +124,22 @@ def create_notification_parameters(n_object):
|
||||
diff_url = "{}/diff/{}".format(base_url, uuid)
|
||||
preview_url = "{}/preview/{}".format(base_url, uuid)
|
||||
|
||||
return {
|
||||
'base_url': base_url,
|
||||
'watch_url': watch_url,
|
||||
'diff_url': diff_url,
|
||||
'preview_url': preview_url,
|
||||
'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else ''
|
||||
}
|
||||
# Not sure deepcopy is needed here, but why not
|
||||
tokens = deepcopy(valid_tokens)
|
||||
|
||||
# Valid_tokens also used as a field validator
|
||||
tokens.update(
|
||||
{
|
||||
'base_url': base_url if base_url is not None else '',
|
||||
'watch_url': watch_url,
|
||||
'watch_uuid': uuid,
|
||||
'watch_title': watch_title if watch_title is not None else '',
|
||||
'watch_tag': watch_tag if watch_tag is not None else '',
|
||||
'diff_url': diff_url,
|
||||
'diff': n_object.get('diff', ''), # Null default in the case we use a test
|
||||
'diff_full': n_object.get('diff_full', ''), # Null default in the case we use a test
|
||||
'preview_url': preview_url,
|
||||
'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else ''
|
||||
})
|
||||
|
||||
return tokens
|
||||
|
||||
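The token replacement above is plain string substitution: each '{token}' in the title and body is swapped for its value from the dict returned by create_notification_parameters. A small illustrative sketch with made-up values:

    notification_body = '{watch_url} had a change.\n---\n{diff}\n---\n'
    tokens = {
        'watch_url': 'https://example.com/pricing',   # illustrative values only
        'diff': '- 10 EUR\n+ 12 EUR',
    }

    for name, value in tokens.items():
        notification_body = notification_body.replace('{' + name + '}', value)

    print(notification_body)
    # https://example.com/pricing had a change.
    # ---
    # - 10 EUR
    # + 12 EUR
    # ---
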
@@ -9,11 +9,16 @@
|
||||
# exit when any command fails
|
||||
set -e
|
||||
|
||||
# Re #65 - Ability to include a link back to the installation, in the notification.
|
||||
export BASE_URL="https://foobar.com"
|
||||
|
||||
find tests/test_*py -type f|while read test_name
|
||||
do
|
||||
echo "TEST RUNNING $test_name"
|
||||
pytest $test_name
|
||||
done
|
||||
|
||||
echo "RUNNING WITH BASE_URL SET"
|
||||
|
||||
# Now re-run some tests with BASE_URL enabled
|
||||
# Re #65 - Ability to include a link back to the installation, in the notification.
|
||||
export BASE_URL="https://really-unique-domain.io"
|
||||
pytest tests/test_notification.py
|
||||
|
||||
|
||||
BIN changedetectionio/static/images/avatar-256x256.png (new binary file, 38 KiB, not shown)
changedetectionio/static/images/copy.svg (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
version="1.1"
|
||||
id="Layer_1"
|
||||
x="0px"
|
||||
y="0px"
|
||||
viewBox="0 0 115.77 122.88"
|
||||
style="enable-background:new 0 0 115.77 122.88"
|
||||
xml:space="preserve"
|
||||
sodipodi:docname="copy.svg"
|
||||
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"><defs
|
||||
id="defs11" /><sodipodi:namedview
|
||||
id="namedview9"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
showgrid="false"
|
||||
inkscape:zoom="5.5501303"
|
||||
inkscape:cx="57.83648"
|
||||
inkscape:cy="61.439999"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1056"
|
||||
inkscape:window-x="1920"
|
||||
inkscape:window-y="0"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="g6" /><style
|
||||
type="text/css"
|
||||
id="style2">.st0{fill-rule:evenodd;clip-rule:evenodd;}</style><g
|
||||
id="g6"><path
|
||||
class="st0"
|
||||
d="M89.62,13.96v7.73h12.19h0.01v0.02c3.85,0.01,7.34,1.57,9.86,4.1c2.5,2.51,4.06,5.98,4.07,9.82h0.02v0.02 v73.27v0.01h-0.02c-0.01,3.84-1.57,7.33-4.1,9.86c-2.51,2.5-5.98,4.06-9.82,4.07v0.02h-0.02h-61.7H40.1v-0.02 c-3.84-0.01-7.34-1.57-9.86-4.1c-2.5-2.51-4.06-5.98-4.07-9.82h-0.02v-0.02V92.51H13.96h-0.01v-0.02c-3.84-0.01-7.34-1.57-9.86-4.1 c-2.5-2.51-4.06-5.98-4.07-9.82H0v-0.02V13.96v-0.01h0.02c0.01-3.85,1.58-7.34,4.1-9.86c2.51-2.5,5.98-4.06,9.82-4.07V0h0.02h61.7 h0.01v0.02c3.85,0.01,7.34,1.57,9.86,4.1c2.5,2.51,4.06,5.98,4.07,9.82h0.02V13.96L89.62,13.96z M79.04,21.69v-7.73v-0.02h0.02 c0-0.91-0.39-1.75-1.01-2.37c-0.61-0.61-1.46-1-2.37-1v0.02h-0.01h-61.7h-0.02v-0.02c-0.91,0-1.75,0.39-2.37,1.01 c-0.61,0.61-1,1.46-1,2.37h0.02v0.01v64.59v0.02h-0.02c0,0.91,0.39,1.75,1.01,2.37c0.61,0.61,1.46,1,2.37,1v-0.02h0.01h12.19V35.65 v-0.01h0.02c0.01-3.85,1.58-7.34,4.1-9.86c2.51-2.5,5.98-4.06,9.82-4.07v-0.02h0.02H79.04L79.04,21.69z M105.18,108.92V35.65v-0.02 h0.02c0-0.91-0.39-1.75-1.01-2.37c-0.61-0.61-1.46-1-2.37-1v0.02h-0.01h-61.7h-0.02v-0.02c-0.91,0-1.75,0.39-2.37,1.01 c-0.61,0.61-1,1.46-1,2.37h0.02v0.01v73.27v0.02h-0.02c0,0.91,0.39,1.75,1.01,2.37c0.61,0.61,1.46,1,2.37,1v-0.02h0.01h61.7h0.02 v0.02c0.91,0,1.75-0.39,2.37-1.01c0.61-0.61,1-1.46,1-2.37h-0.02V108.92L105.18,108.92z"
|
||||
id="path4"
|
||||
style="fill:#ffffff;fill-opacity:1" /></g></svg>
|
||||
|
changedetectionio/static/images/spread.svg (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="18"
|
||||
height="19.92"
|
||||
viewBox="0 0 18 19.92"
|
||||
version="1.1"
|
||||
id="svg6"
|
||||
sodipodi:docname="spread.svg"
|
||||
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<defs
|
||||
id="defs10" />
|
||||
<sodipodi:namedview
|
||||
id="namedview8"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
showgrid="false"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0"
|
||||
inkscape:zoom="28.416667"
|
||||
inkscape:cx="9.0087975"
|
||||
inkscape:cy="9.9941348"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1056"
|
||||
inkscape:window-x="1920"
|
||||
inkscape:window-y="0"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg6" />
|
||||
<path
|
||||
d="M -3,-2 H 21 V 22 H -3 Z"
|
||||
fill="none"
|
||||
id="path2" />
|
||||
<path
|
||||
d="m 15,14.08 c -0.76,0 -1.44,0.3 -1.96,0.77 L 5.91,10.7 C 5.96,10.47 6,10.24 6,10 6,9.76 5.96,9.53 5.91,9.3 L 12.96,5.19 C 13.5,5.69 14.21,6 15,6 16.66,6 18,4.66 18,3 18,1.34 16.66,0 15,0 c -1.66,0 -3,1.34 -3,3 0,0.24 0.04,0.47 0.09,0.7 L 5.04,7.81 C 4.5,7.31 3.79,7 3,7 1.34,7 0,8.34 0,10 c 0,1.66 1.34,3 3,3 0.79,0 1.5,-0.31 2.04,-0.81 l 7.12,4.16 c -0.05,0.21 -0.08,0.43 -0.08,0.65 0,1.61 1.31,2.92 2.92,2.92 1.61,0 2.92,-1.31 2.92,-2.92 0,-1.61 -1.31,-2.92 -2.92,-2.92 z"
|
||||
id="path4"
|
||||
style="fill:#0078e7;fill-opacity:1" />
|
||||
</svg>
|
||||
|
changedetectionio/static/js/global-settings.js (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
$(document).ready(function () {
|
||||
function toggle() {
|
||||
if ($('input[name="application-fetch_backend"]:checked').val() != 'html_requests') {
|
||||
$('#requests-override-options').hide();
|
||||
$('#webdriver-override-options').show();
|
||||
} else {
|
||||
$('#requests-override-options').show();
|
||||
$('#webdriver-override-options').hide();
|
||||
}
|
||||
}
|
||||
|
||||
$('input[name="application-fetch_backend"]').click(function (e) {
|
||||
toggle();
|
||||
});
|
||||
toggle();
|
||||
|
||||
$("#api-key").hover(
|
||||
function () {
|
||||
$("#api-key-copy").html('copy').fadeIn();
|
||||
},
|
||||
function () {
|
||||
$("#api-key-copy").hide();
|
||||
}
|
||||
).click(function (e) {
|
||||
$("#api-key-copy").html('copied');
|
||||
var range = document.createRange();
|
||||
var n = $("#api-key")[0];
|
||||
range.selectNode(n);
|
||||
window.getSelection().removeAllRanges();
|
||||
window.getSelection().addRange(range);
|
||||
document.execCommand("copy");
|
||||
window.getSelection().removeAllRanges();
|
||||
|
||||
});
|
||||
});
|
||||
|
||||
changedetectionio/static/js/jquery-3.6.0.min.js (new vendored file, 2 lines)
File diff suppressed because one or more lines are too long
changedetectionio/static/js/notifications.js (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
$(document).ready(function() {
|
||||
|
||||
$('#add-email-helper').click(function (e) {
|
||||
e.preventDefault();
|
||||
email = prompt("Destination email");
|
||||
if(email) {
|
||||
var n = $(".notification-urls");
|
||||
var p=email_notification_prefix;
|
||||
$(n).val( $.trim( $(n).val() )+"\n"+email_notification_prefix+email );
|
||||
}
|
||||
});
|
||||
|
||||
$('#send-test-notification').click(function (e) {
|
||||
e.preventDefault();
|
||||
|
||||
// this can be global
|
||||
var csrftoken = $('input[name=csrf_token]').val();
|
||||
$.ajaxSetup({
|
||||
beforeSend: function(xhr, settings) {
|
||||
if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
|
||||
xhr.setRequestHeader("X-CSRFToken", csrftoken)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
data = {
|
||||
window_url : window.location.href,
|
||||
notification_urls : $('.notification-urls').val(),
|
||||
notification_title : $('.notification-title').val(),
|
||||
notification_body : $('.notification-body').val(),
|
||||
notification_format : $('.notification-format').val(),
|
||||
}
|
||||
for (key in data) {
|
||||
if (!data[key].length) {
|
||||
alert(key+" is empty, cannot send test.")
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: notification_base_url,
|
||||
data : data
|
||||
}).done(function(data){
|
||||
console.log(data);
|
||||
alert('Sent');
|
||||
}).fail(function(data){
|
||||
console.log(data);
|
||||
alert('Error: '+data.responseJSON.error);
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
window.addEventListener("load", (event) => {
|
||||
// just an example for now
|
||||
function toggleVisible(elem) {
|
||||
// theres better ways todo this
|
||||
var x = document.getElementById(elem);
|
||||
if (x.style.display === "block") {
|
||||
x.style.display = "none";
|
||||
} else {
|
||||
x.style.display = "block";
|
||||
}
|
||||
}
|
||||
|
||||
document.getElementById("toggle-customise-notifications").onclick = function () {
|
||||
toggleVisible("notification-customisation");
|
||||
};
|
||||
});
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
// Rewrite this as a plugin.. is all this JS really 'worth it?'
|
||||
|
||||
|
||||
if(!window.location.hash) {
|
||||
var tab=document.querySelectorAll("#default-tab a");
|
||||
tab[0].click();
|
||||
}
|
||||
|
||||
window.addEventListener('hashchange', function() {
|
||||
var tabs = document.getElementsByClassName('active');
|
||||
while (tabs[0]) {
|
||||
@@ -21,7 +26,6 @@ if (!has_errors.length) {
|
||||
focus_error_tab();
|
||||
}
|
||||
|
||||
|
||||
function set_active_tab() {
|
||||
var tab=document.querySelectorAll("a[href='"+location.hash+"']");
|
||||
if (tab.length) {
|
||||
|
||||
changedetectionio/static/js/watch-overview.js (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
$(function () {
|
||||
// Remove unviewed status when normally clicked
|
||||
$('.diff-link').click(function () {
|
||||
$(this).closest('.unviewed').removeClass('unviewed');
|
||||
});
|
||||
|
||||
$('.with-share-link > *').click(function () {
|
||||
$("#copied-clipboard").remove();
|
||||
|
||||
var range = document.createRange();
|
||||
var n=$("#share-link")[0];
|
||||
range.selectNode(n);
|
||||
window.getSelection().removeAllRanges();
|
||||
window.getSelection().addRange(range);
|
||||
document.execCommand("copy");
|
||||
window.getSelection().removeAllRanges();
|
||||
|
||||
$('.with-share-link').append('<span style="font-size: 80%; color: #fff;" id="copied-clipboard">Copied to clipboard</span>');
|
||||
$("#copied-clipboard").fadeOut(2500, function() {
|
||||
$(this).remove();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
changedetectionio/static/js/watch-settings.js (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
$(document).ready(function() {
|
||||
function toggle() {
|
||||
if ($('input[name="fetch_backend"]:checked').val() != 'html_requests') {
|
||||
$('#requests-override-options').hide();
|
||||
$('#webdriver-override-options').show();
|
||||
} else {
|
||||
$('#requests-override-options').show();
|
||||
$('#webdriver-override-options').hide();
|
||||
}
|
||||
}
|
||||
$('input[name="fetch_backend"]').click(function (e) {
|
||||
toggle();
|
||||
});
|
||||
toggle();
|
||||
|
||||
});
|
||||
@@ -1,9 +1,10 @@
|
||||
#diff-ui {
|
||||
background: #fff;
|
||||
padding: 2em;
|
||||
margin: 1em;
|
||||
margin-left: 1em;
|
||||
margin-right: 1em;
|
||||
border-radius: 5px;
|
||||
font-size: 9px; }
|
||||
font-size: 11px; }
|
||||
#diff-ui table {
|
||||
table-layout: fixed;
|
||||
width: 100%; }
|
||||
@@ -54,3 +55,24 @@ ins {
|
||||
body {
|
||||
height: 99%;
|
||||
/* Hide scroll bar in Firefox */ } }
|
||||
|
||||
td#diff-col div {
|
||||
text-align: justify;
|
||||
white-space: pre-wrap; }
|
||||
|
||||
.ignored {
|
||||
background-color: #ccc;
|
||||
/* border: #0d91fa 1px solid; */
|
||||
opacity: 0.7; }
|
||||
|
||||
.triggered {
|
||||
background-color: #1b98f8; }
|
||||
|
||||
/* ignored and triggered? make it obvious error */
|
||||
.ignored.triggered {
|
||||
background-color: #ff0000; }
|
||||
|
||||
.tab-pane-inner#screenshot {
|
||||
text-align: center; }
|
||||
.tab-pane-inner#screenshot img {
|
||||
max-width: 99%; }
|
||||
|
||||
@@ -2,9 +2,10 @@
|
||||
|
||||
background: #fff;
|
||||
padding: 2em;
|
||||
margin: 1em;
|
||||
margin-left: 1em;
|
||||
margin-right: 1em;
|
||||
border-radius: 5px;
|
||||
font-size: 9px;
|
||||
font-size: 11px;
|
||||
|
||||
table {
|
||||
table-layout: fixed;
|
||||
@@ -66,3 +67,30 @@ ins {
|
||||
height: 99%; /* Hide scroll bar in Firefox */
|
||||
}
|
||||
}
|
||||
|
||||
td#diff-col div {
|
||||
text-align: justify;
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
|
||||
.ignored {
|
||||
background-color: #ccc;
|
||||
/* border: #0d91fa 1px solid; */
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
.triggered {
|
||||
background-color: #1b98f8;
|
||||
}
|
||||
|
||||
/* ignored and triggered? make it obvious error */
|
||||
.ignored.triggered {
|
||||
background-color: #ff0000;
|
||||
}
|
||||
|
||||
.tab-pane-inner#screenshot {
|
||||
text-align: center;
|
||||
img {
|
||||
max-width: 99%;
|
||||
}
|
||||
}
|
||||
changedetectionio/static/styles/package-lock.json (generated, 369 lines changed)
@@ -9,8 +9,8 @@
|
||||
"version": "0.0.3",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"node-sass": "^6.0.1",
|
||||
"tar": "^6.1.6",
|
||||
"node-sass": "^7.0.0",
|
||||
"tar": "^6.1.9",
|
||||
"trim-newlines": "^3.0.1"
|
||||
}
|
||||
},
|
||||
@@ -128,13 +128,35 @@
|
||||
}
|
||||
},
|
||||
"node_modules/ansi-styles": {
|
||||
"version": "2.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz",
|
||||
"integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=",
|
||||
"version": "4.3.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
||||
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
||||
"dependencies": {
|
||||
"color-convert": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
"node": ">=8"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/ansi-styles/node_modules/color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||
"dependencies": {
|
||||
"color-name": "~1.1.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=7.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ansi-styles/node_modules/color-name": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
|
||||
},
|
||||
"node_modules/aproba": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz",
|
||||
@@ -251,18 +273,18 @@
|
||||
"integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw="
|
||||
},
|
||||
"node_modules/chalk": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz",
|
||||
"integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=",
|
||||
"version": "4.1.2",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
|
||||
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
|
||||
"dependencies": {
|
||||
"ansi-styles": "^2.2.1",
|
||||
"escape-string-regexp": "^1.0.2",
|
||||
"has-ansi": "^2.0.0",
|
||||
"strip-ansi": "^3.0.0",
|
||||
"supports-color": "^2.0.0"
|
||||
"ansi-styles": "^4.1.0",
|
||||
"supports-color": "^7.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/chalk?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/chownr": {
|
||||
@@ -344,6 +366,14 @@
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
|
||||
"integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU="
|
||||
},
|
||||
"node_modules/color-support": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
|
||||
"integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==",
|
||||
"bin": {
|
||||
"color-support": "bin.js"
|
||||
}
|
||||
},
|
||||
"node_modules/combined-stream": {
|
||||
"version": "1.0.8",
|
||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||
@@ -677,17 +707,6 @@
|
||||
"node": ">= 0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/has-ansi": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz",
|
||||
"integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=",
|
||||
"dependencies": {
|
||||
"ansi-regex": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/has-flag": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
|
||||
@@ -1042,13 +1061,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/node-sass": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/node-sass/-/node-sass-6.0.1.tgz",
|
||||
"integrity": "sha512-f+Rbqt92Ful9gX0cGtdYwjTrWAaGURgaK5rZCWOgCNyGWusFYHhbqCCBoFBeat+HKETOU02AyTxNhJV0YZf2jQ==",
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/node-sass/-/node-sass-7.0.0.tgz",
|
||||
"integrity": "sha512-6yUnsD3L8fVbgMX6nKQqZkjRcG7a/PpmF0pEyeWf+BgbTj2ToJlCYrnUifL2KbjV5gIY22I3oppahBWA3B+jUg==",
|
||||
"hasInstallScript": true,
|
||||
"dependencies": {
|
||||
"async-foreach": "^0.1.3",
|
||||
"chalk": "^1.1.1",
|
||||
"chalk": "^4.1.2",
|
||||
"cross-spawn": "^7.0.3",
|
||||
"gaze": "^1.0.0",
|
||||
"get-stdin": "^4.0.1",
|
||||
@@ -1057,7 +1076,7 @@
|
||||
"meow": "^9.0.0",
|
||||
"nan": "^2.13.2",
|
||||
"node-gyp": "^7.1.0",
|
||||
"npmlog": "^4.0.0",
|
||||
"npmlog": "^5.0.0",
|
||||
"request": "^2.88.0",
|
||||
"sass-graph": "2.2.5",
|
||||
"stdout-stream": "^1.4.0",
|
||||
@@ -1070,6 +1089,106 @@
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/node-sass/node_modules/ansi-regex": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
|
||||
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/node-sass/node_modules/are-we-there-yet": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz",
|
||||
"integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==",
|
||||
"dependencies": {
|
||||
"delegates": "^1.0.0",
|
||||
"readable-stream": "^3.6.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/node-sass/node_modules/emoji-regex": {
|
||||
"version": "8.0.0",
|
||||
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
|
||||
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
|
||||
},
|
||||
"node_modules/node-sass/node_modules/gauge": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz",
|
||||
"integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==",
|
||||
"dependencies": {
|
||||
"aproba": "^1.0.3 || ^2.0.0",
|
||||
"color-support": "^1.1.2",
|
||||
"console-control-strings": "^1.0.0",
|
||||
"has-unicode": "^2.0.1",
|
||||
"object-assign": "^4.1.1",
|
||||
"signal-exit": "^3.0.0",
|
||||
"string-width": "^4.2.3",
|
||||
"strip-ansi": "^6.0.1",
|
||||
"wide-align": "^1.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/node-sass/node_modules/is-fullwidth-code-point": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
|
||||
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/node-sass/node_modules/npmlog": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz",
|
||||
"integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==",
|
||||
"dependencies": {
|
||||
"are-we-there-yet": "^2.0.0",
|
||||
"console-control-strings": "^1.1.0",
|
||||
"gauge": "^3.0.0",
|
||||
"set-blocking": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/node-sass/node_modules/readable-stream": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
|
||||
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
|
||||
"dependencies": {
|
||||
"inherits": "^2.0.3",
|
||||
"string_decoder": "^1.1.1",
|
||||
"util-deprecate": "^1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/node-sass/node_modules/string-width": {
|
||||
"version": "4.2.3",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
|
||||
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
|
||||
"dependencies": {
|
||||
"emoji-regex": "^8.0.0",
|
||||
"is-fullwidth-code-point": "^3.0.0",
|
||||
"strip-ansi": "^6.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/node-sass/node_modules/strip-ansi": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
|
||||
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
|
||||
"dependencies": {
|
||||
"ansi-regex": "^5.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/nopt": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz",
|
||||
@@ -1616,17 +1735,28 @@
|
||||
}
|
||||
},
|
||||
"node_modules/supports-color": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz",
|
||||
"integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=",
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
|
||||
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
|
||||
"dependencies": {
|
||||
"has-flag": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8.0"
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/supports-color/node_modules/has-flag": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
|
||||
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/tar": {
|
||||
"version": "6.1.6",
|
||||
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.6.tgz",
|
||||
"integrity": "sha512-oaWyu5dQbHaYcyZCTfyPpC+VmI62/OM2RTUYavTk1MDr1cwW5Boi3baeYQKiZbY2uSQJGr+iMOzb/JFxLrft+g==",
|
||||
"version": "6.1.9",
|
||||
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.9.tgz",
|
||||
"integrity": "sha512-XjLaMNl76o07zqZC/aW4lwegdY07baOH1T8w3AEfrHAdyg/oYO4ctjzEBq9Gy9fEP9oHqLIgvx6zuGDGe+bc8Q==",
|
||||
"dependencies": {
|
||||
"chownr": "^2.0.0",
|
||||
"fs-minipass": "^2.0.0",
|
||||
@@ -2050,9 +2180,27 @@
|
||||
"integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8="
|
||||
},
|
||||
"ansi-styles": {
|
||||
"version": "2.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz",
|
||||
"integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4="
|
||||
"version": "4.3.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
||||
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
||||
"requires": {
|
||||
"color-convert": "^2.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||
"requires": {
|
||||
"color-name": "~1.1.4"
|
||||
}
|
||||
},
|
||||
"color-name": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"aproba": {
|
||||
"version": "1.2.0",
|
||||
@@ -2149,15 +2297,12 @@
|
||||
"integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw="
|
||||
},
|
||||
"chalk": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz",
|
||||
"integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=",
|
||||
"version": "4.1.2",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
|
||||
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
|
||||
"requires": {
|
||||
"ansi-styles": "^2.2.1",
|
||||
"escape-string-regexp": "^1.0.2",
|
||||
"has-ansi": "^2.0.0",
|
||||
"strip-ansi": "^3.0.0",
|
||||
"supports-color": "^2.0.0"
|
||||
"ansi-styles": "^4.1.0",
|
||||
"supports-color": "^7.1.0"
|
||||
}
|
||||
},
|
||||
"chownr": {
|
||||
@@ -2223,6 +2368,11 @@
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
|
||||
"integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU="
|
||||
},
|
||||
"color-support": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
|
||||
"integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg=="
|
||||
},
|
||||
"combined-stream": {
|
||||
"version": "1.0.8",
|
||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||
@@ -2485,14 +2635,6 @@
|
||||
"function-bind": "^1.1.1"
|
||||
}
|
||||
},
|
||||
"has-ansi": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz",
|
||||
"integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=",
|
||||
"requires": {
|
||||
"ansi-regex": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"has-flag": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
|
||||
@@ -2768,12 +2910,12 @@
|
||||
}
|
||||
},
|
||||
"node-sass": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/node-sass/-/node-sass-6.0.1.tgz",
|
||||
"integrity": "sha512-f+Rbqt92Ful9gX0cGtdYwjTrWAaGURgaK5rZCWOgCNyGWusFYHhbqCCBoFBeat+HKETOU02AyTxNhJV0YZf2jQ==",
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/node-sass/-/node-sass-7.0.0.tgz",
|
||||
"integrity": "sha512-6yUnsD3L8fVbgMX6nKQqZkjRcG7a/PpmF0pEyeWf+BgbTj2ToJlCYrnUifL2KbjV5gIY22I3oppahBWA3B+jUg==",
|
||||
"requires": {
|
||||
"async-foreach": "^0.1.3",
|
||||
"chalk": "^1.1.1",
|
||||
"chalk": "^4.1.2",
|
||||
"cross-spawn": "^7.0.3",
|
||||
"gaze": "^1.0.0",
|
||||
"get-stdin": "^4.0.1",
|
||||
@@ -2782,11 +2924,92 @@
|
||||
"meow": "^9.0.0",
|
||||
"nan": "^2.13.2",
|
||||
"node-gyp": "^7.1.0",
|
||||
"npmlog": "^4.0.0",
|
||||
"npmlog": "^5.0.0",
|
||||
"request": "^2.88.0",
|
||||
"sass-graph": "2.2.5",
|
||||
"stdout-stream": "^1.4.0",
|
||||
"true-case-path": "^1.0.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"ansi-regex": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
|
||||
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="
|
||||
},
|
||||
"are-we-there-yet": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz",
|
||||
"integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==",
|
||||
"requires": {
|
||||
"delegates": "^1.0.0",
|
||||
"readable-stream": "^3.6.0"
|
||||
}
|
||||
},
|
||||
"emoji-regex": {
|
||||
"version": "8.0.0",
|
||||
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
|
||||
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
|
||||
},
|
||||
"gauge": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz",
|
||||
"integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==",
|
||||
"requires": {
|
||||
"aproba": "^1.0.3 || ^2.0.0",
|
||||
"color-support": "^1.1.2",
|
||||
"console-control-strings": "^1.0.0",
|
||||
"has-unicode": "^2.0.1",
|
||||
"object-assign": "^4.1.1",
|
||||
"signal-exit": "^3.0.0",
|
||||
"string-width": "^4.2.3",
|
||||
"strip-ansi": "^6.0.1",
|
||||
"wide-align": "^1.1.2"
|
||||
}
|
||||
},
|
||||
"is-fullwidth-code-point": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
|
||||
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
|
||||
},
|
||||
"npmlog": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz",
|
||||
"integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==",
|
||||
"requires": {
|
||||
"are-we-there-yet": "^2.0.0",
|
||||
"console-control-strings": "^1.1.0",
|
||||
"gauge": "^3.0.0",
|
||||
"set-blocking": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"readable-stream": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
|
||||
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
|
||||
"requires": {
|
||||
"inherits": "^2.0.3",
|
||||
"string_decoder": "^1.1.1",
|
||||
"util-deprecate": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"string-width": {
|
||||
"version": "4.2.3",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
|
||||
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
|
||||
"requires": {
|
||||
"emoji-regex": "^8.0.0",
|
||||
"is-fullwidth-code-point": "^3.0.0",
|
||||
"strip-ansi": "^6.0.1"
|
||||
}
|
||||
},
|
||||
"strip-ansi": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
|
||||
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
|
||||
"requires": {
|
||||
"ansi-regex": "^5.0.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"nopt": {
|
||||
@@ -3213,14 +3436,24 @@
|
||||
}
|
||||
},
|
||||
"supports-color": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz",
|
||||
"integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc="
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
|
||||
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
|
||||
"requires": {
|
||||
"has-flag": "^4.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"has-flag": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
|
||||
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"tar": {
|
||||
"version": "6.1.6",
|
||||
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.6.tgz",
|
||||
"integrity": "sha512-oaWyu5dQbHaYcyZCTfyPpC+VmI62/OM2RTUYavTk1MDr1cwW5Boi3baeYQKiZbY2uSQJGr+iMOzb/JFxLrft+g==",
|
||||
"version": "6.1.9",
|
||||
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.9.tgz",
|
||||
"integrity": "sha512-XjLaMNl76o07zqZC/aW4lwegdY07baOH1T8w3AEfrHAdyg/oYO4ctjzEBq9Gy9fEP9oHqLIgvx6zuGDGe+bc8Q==",
|
||||
"requires": {
|
||||
"chownr": "^2.0.0",
|
||||
"fs-minipass": "^2.0.0",
|
||||
|
||||
@@ -4,14 +4,13 @@
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"scss": "node-sass --watch styles.scss diff.scss -o ."
|
||||
"build": "node-sass styles.scss -o .;node-sass diff.scss -o ."
|
||||
},
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"node-sass": "^6.0.1",
|
||||
"tar": "^6.1.6",
|
||||
"node-sass": "^7.0.0",
|
||||
"tar": "^6.1.9",
|
||||
"trim-newlines": "^3.0.1"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
/*
|
||||
* -- BASE STYLES --
|
||||
* Most of these are inherited from Base, but I want to change a few.
|
||||
* npm run scss
|
||||
* nvm use v14.18.1
|
||||
* npm install
|
||||
* npm run build
|
||||
* or npm run watch
|
||||
*/
|
||||
body {
|
||||
color: #333;
|
||||
@@ -28,21 +31,24 @@ a.github-link {
|
||||
|
||||
section.content {
|
||||
padding-top: 5em;
|
||||
padding-bottom: 5em;
|
||||
padding-bottom: 1em;
|
||||
flex-direction: column;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center; }
|
||||
|
||||
code {
|
||||
background: #eee; }
|
||||
|
||||
/* table related */
|
||||
.watch-table {
|
||||
width: 100%; }
|
||||
width: 100%;
|
||||
font-size: 80%; }
|
||||
.watch-table tr.unviewed {
|
||||
font-weight: bold; }
|
||||
.watch-table .error {
|
||||
color: #a00; }
|
||||
.watch-table td {
|
||||
font-size: 80%;
|
||||
white-space: nowrap; }
|
||||
.watch-table td.title-col {
|
||||
word-break: break-all;
|
||||
@@ -77,11 +83,11 @@ section.content {
|
||||
|
||||
body:after {
|
||||
content: "";
|
||||
background: linear-gradient(130deg, #ff7a18, #af002d 41.07%, #319197 76.05%); }
|
||||
background: linear-gradient(130deg, #5ad8f7, #2f50af 41.07%, #9150bf 84.05%); }
|
||||
|
||||
body:after, body:before {
|
||||
display: block;
|
||||
height: 600px;
|
||||
height: 650px;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
@@ -93,9 +99,6 @@ body::after {
|
||||
|
||||
body::before {
|
||||
content: "";
|
||||
background-image: url(/static/images/gradient-border.png); }
|
||||
|
||||
body:before {
|
||||
background-size: cover; }
|
||||
|
||||
body:after, body:before {
|
||||
@@ -177,14 +180,19 @@ body:after, body:before {
|
||||
.messages li.notice {
|
||||
background: rgba(255, 255, 255, 0.5); }
|
||||
|
||||
.messages.with-share-link > *:hover {
|
||||
cursor: pointer; }
|
||||
|
||||
#notification-customisation {
|
||||
display: block;
|
||||
border: 1px solid #ccc;
|
||||
padding: 1rem;
|
||||
padding: 0.5rem;
|
||||
border-radius: 5px; }
|
||||
|
||||
#toggle-customise-notifications {
|
||||
cursor: pointer; }
|
||||
#notification-error-log {
|
||||
border: 1px solid #ccc;
|
||||
padding: 1rem;
|
||||
border-radius: 5px;
|
||||
overflow-wrap: break-word; }
|
||||
|
||||
#token-table.pure-table td, #token-table.pure-table th {
|
||||
font-size: 80%; }
|
||||
@@ -200,7 +208,8 @@ body:after, body:before {
|
||||
#new-watch-form .label {
|
||||
display: none; }
|
||||
#new-watch-form legend {
|
||||
color: #fff; }
|
||||
color: #fff;
|
||||
font-weight: bold; }
|
||||
|
||||
#diff-col {
|
||||
padding-left: 40px; }
|
||||
@@ -243,14 +252,18 @@ footer {
|
||||
|
||||
.sticky-tab {
|
||||
position: absolute;
|
||||
top: 80px;
|
||||
font-size: 8px;
|
||||
top: 60px;
|
||||
font-size: 65%;
|
||||
background: #fff;
|
||||
padding: 10px; }
|
||||
.sticky-tab#left-sticky {
|
||||
left: 0px; }
|
||||
.sticky-tab#right-sticky {
|
||||
right: 0px; }
|
||||
.sticky-tab#hosted-sticky {
|
||||
right: 0px;
|
||||
top: 100px;
|
||||
font-weight: bold; }
|
||||
|
||||
#new-version-text a {
|
||||
color: #e07171; }
|
||||
@@ -271,10 +284,20 @@ footer {
|
||||
.pure-form {
|
||||
/* The input fields with errors */
|
||||
/* The list of errors */ }
|
||||
.pure-form fieldset {
|
||||
padding-top: 0px; }
|
||||
.pure-form fieldset ul {
|
||||
padding-bottom: 0px;
|
||||
margin-bottom: 0px; }
|
||||
.pure-form .pure-control-group, .pure-form .pure-group, .pure-form .pure-controls {
|
||||
padding-bottom: 1em; }
|
||||
.pure-form .pure-control-group div, .pure-form .pure-group div, .pure-form .pure-controls div {
|
||||
margin: 0px; }
|
||||
.pure-form .pure-control-group .checkbox > *, .pure-form .pure-group .checkbox > *, .pure-form .pure-controls .checkbox > * {
|
||||
display: inline;
|
||||
vertical-align: middle; }
|
||||
.pure-form .pure-control-group .checkbox > label, .pure-form .pure-group .checkbox > label, .pure-form .pure-controls .checkbox > label {
|
||||
padding-left: 5px; }
|
||||
.pure-form .error input {
|
||||
background-color: #ffebeb; }
|
||||
.pure-form ul.errors {
|
||||
@@ -291,10 +314,10 @@ footer {
|
||||
font-weight: bold; }
|
||||
.pure-form textarea {
|
||||
width: 100%; }
|
||||
.pure-form ul#fetch_backend {
|
||||
.pure-form .inline-radio ul {
|
||||
margin: 0px;
|
||||
list-style: none; }
|
||||
.pure-form ul#fetch_backend > li > * {
|
||||
.pure-form .inline-radio ul li > * {
|
||||
display: inline-block; }
|
||||
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 1024px) {
|
||||
@@ -306,14 +329,23 @@ footer {
|
||||
#nav-menu {
|
||||
overflow-x: scroll; } }
|
||||
|
||||
/*
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 800px) {
|
||||
div.sticky-tab#hosted-sticky {
|
||||
top: 60px;
|
||||
left: 0px;
|
||||
right: auto; }
|
||||
section.content {
|
||||
padding-top: 110px; }
|
||||
div.tabs.collapsable ul li {
|
||||
display: block;
|
||||
border-radius: 0px; }
|
||||
input[type='text'] {
|
||||
width: 100%; }
|
||||
/*
|
||||
Max width before this PARTICULAR table gets nasty
|
||||
This query will take effect for any screen smaller than 760px
|
||||
and also iPads specifically.
|
||||
*/
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 1024px) {
|
||||
input[type='text'] {
|
||||
width: 100%; }
|
||||
.watch-table {
|
||||
/* Force table to not be like tables anymore */
|
||||
/* Force table to not be like tables anymore */
|
||||
@@ -385,14 +417,22 @@ and also iPads specifically.
|
||||
.pure-form-stacked > div:first-child {
|
||||
display: block; }
|
||||
|
||||
.login-form .inner {
|
||||
background: #fff;
|
||||
padding: 20px;
|
||||
border-radius: 5px; }
|
||||
|
||||
.tab-pane-inner {
|
||||
padding: 0px; }
|
||||
.tab-pane-inner:not(:target) {
|
||||
display: none; }
|
||||
.tab-pane-inner:target {
|
||||
display: block; }
|
||||
|
||||
.edit-form {
|
||||
min-width: 70%; }
|
||||
.edit-form .tab-pane-inner {
|
||||
padding: 0px; }
|
||||
.edit-form .tab-pane-inner:not(:target) {
|
||||
display: none; }
|
||||
.edit-form .tab-pane-inner:target {
|
||||
display: block; }
|
||||
min-width: 70%;
|
||||
/* so it cant overflow */
|
||||
max-width: 95%; }
|
||||
.edit-form .box-wrap {
|
||||
position: relative; }
|
||||
.edit-form .inner {
|
||||
@@ -401,3 +441,24 @@ and also iPads specifically.
|
||||
.edit-form #actions {
|
||||
display: block;
|
||||
background: #fff; }
|
||||
.edit-form .pure-form-message-inline {
|
||||
padding-left: 0; }
|
||||
|
||||
ul {
|
||||
padding-left: 1em;
|
||||
padding-top: 0px;
|
||||
margin-top: 4px; }
|
||||
|
||||
.time-check-widget tr {
|
||||
display: inline; }
|
||||
.time-check-widget tr input[type="number"] {
|
||||
width: 5em; }
|
||||
|
||||
#webdriver-override-options input[type="number"] {
|
||||
width: 5em; }
|
||||
|
||||
#api-key:hover {
|
||||
cursor: pointer; }
|
||||
|
||||
#api-key-copy {
|
||||
color: #0078e7; }
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
/*
|
||||
* -- BASE STYLES --
|
||||
* Most of these are inherited from Base, but I want to change a few.
|
||||
* npm run scss
|
||||
* nvm use v14.18.1
|
||||
* npm install
|
||||
* npm run build
|
||||
* or npm run watch
|
||||
*/
|
||||
body {
|
||||
color: #333;
|
||||
@@ -32,16 +35,21 @@ a.github-link {
|
||||
|
||||
section.content {
|
||||
padding-top: 5em;
|
||||
padding-bottom: 5em;
|
||||
padding-bottom: 1em;
|
||||
flex-direction: column;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
code {
|
||||
background: #eee;
|
||||
}
|
||||
|
||||
/* table related */
|
||||
.watch-table {
|
||||
width: 100%;
|
||||
font-size: 80%;
|
||||
|
||||
tr.unviewed {
|
||||
font-weight: bold;
|
||||
@@ -52,7 +60,6 @@ section.content {
|
||||
}
|
||||
|
||||
td {
|
||||
font-size: 80%;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
@@ -104,12 +111,12 @@ section.content {
|
||||
|
||||
body:after {
|
||||
content: "";
|
||||
background: linear-gradient(130deg, #ff7a18, #af002d 41.07%, #319197 76.05%)
|
||||
background: linear-gradient(130deg, #5ad8f7, #2f50af 41.07%, #9150bf 84.05%);
|
||||
}
|
||||
|
||||
body:after, body:before {
|
||||
display: block;
|
||||
height: 600px;
|
||||
height: 650px;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
@@ -122,11 +129,8 @@ body::after {
|
||||
}
|
||||
|
||||
body::before {
|
||||
// background-image set in base.html so it works with reverse proxies etc
|
||||
content: "";
|
||||
background-image: url(/static/images/gradient-border.png);
|
||||
}
|
||||
|
||||
body:before {
|
||||
background-size: cover
|
||||
}
|
||||
|
||||
@@ -233,20 +237,26 @@ body:after, body:before {
|
||||
background: rgba(255, 255, 255, .5);
|
||||
}
|
||||
}
|
||||
&.with-share-link {
|
||||
> *:hover {
|
||||
cursor:pointer;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#notification-customisation {
|
||||
display: block;
|
||||
border: 1px solid #ccc;
|
||||
padding: 1rem;
|
||||
padding: 0.5rem;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
#toggle-customise-notifications {
|
||||
cursor: pointer;
|
||||
#notification-error-log {
|
||||
border: 1px solid #ccc;
|
||||
padding: 1rem;
|
||||
border-radius: 5px;
|
||||
overflow-wrap: break-word;
|
||||
}
|
||||
|
||||
|
||||
#token-table {
|
||||
&.pure-table td, &.pure-table th {
|
||||
font-size: 80%;
|
||||
@@ -267,6 +277,7 @@ body:after, body:before {
|
||||
}
|
||||
legend {
|
||||
color: #fff;
|
||||
font-weight: bold;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -319,12 +330,10 @@ footer {
|
||||
*/
|
||||
}
|
||||
|
||||
|
||||
|
||||
.sticky-tab {
|
||||
position: absolute;
|
||||
top: 80px;
|
||||
font-size: 8px;
|
||||
top: 60px;
|
||||
font-size: 65%;
|
||||
background: #fff;
|
||||
padding: 10px;
|
||||
&#left-sticky {
|
||||
@@ -333,6 +342,11 @@ footer {
|
||||
&#right-sticky {
|
||||
right: 0px;
|
||||
}
|
||||
&#hosted-sticky {
|
||||
right: 0px;
|
||||
top: 100px;
|
||||
font-weight: bold;
|
||||
}
|
||||
}
|
||||
|
||||
#new-version-text a {
|
||||
@@ -361,11 +375,27 @@ footer {
|
||||
|
||||
|
||||
.pure-form {
|
||||
fieldset {
|
||||
padding-top: 0px;
|
||||
ul {
|
||||
padding-bottom: 0px;
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
}
|
||||
.pure-control-group, .pure-group, .pure-controls {
|
||||
padding-bottom: 1em;
|
||||
div {
|
||||
margin: 0px;
|
||||
}
|
||||
.checkbox {
|
||||
> * {
|
||||
display: inline;
|
||||
vertical-align: middle;
|
||||
}
|
||||
> label {
|
||||
padding-left: 5px;
|
||||
}
|
||||
}
|
||||
}
|
||||
/* The input fields with errors */
|
||||
.error {
|
||||
@@ -395,14 +425,16 @@ footer {
|
||||
textarea {
|
||||
width: 100%;
|
||||
}
|
||||
ul#fetch_backend {
|
||||
margin: 0px;
|
||||
list-style: none;
|
||||
> li {
|
||||
> * {
|
||||
display: inline-block;
|
||||
.inline-radio {
|
||||
ul {
|
||||
margin: 0px;
|
||||
list-style: none;
|
||||
li {
|
||||
> * {
|
||||
display: inline-block;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -419,18 +451,35 @@ footer {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 800px) {
|
||||
|
||||
div.sticky-tab#hosted-sticky {
|
||||
top: 60px;
|
||||
left: 0px;
|
||||
right: auto;
|
||||
}
|
||||
|
||||
section.content {
|
||||
padding-top: 110px;
|
||||
}
|
||||
|
||||
// Make the tabs easier to hit, they will be all nice and horizontal
|
||||
div.tabs.collapsable ul li {
|
||||
display: block;
|
||||
border-radius: 0px;
|
||||
}
|
||||
|
||||
input[type='text'] {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
/*
|
||||
Max width before this PARTICULAR table gets nasty
|
||||
This query will take effect for any screen smaller than 760px
|
||||
and also iPads specifically.
|
||||
*/
|
||||
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 1024px) {
|
||||
|
||||
input[type='text'] {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.watch-table {
|
||||
/* Force table to not be like tables anymore */
|
||||
thead, tbody, th, td, tr {
|
||||
@@ -544,9 +593,16 @@ $form-edge-padding: 20px;
|
||||
display: block;
|
||||
}
|
||||
}
|
||||
.edit-form {
|
||||
min-width: 70%;
|
||||
.tab-pane-inner {
|
||||
|
||||
.login-form {
|
||||
.inner {
|
||||
background: #fff;
|
||||
padding: $form-edge-padding;
|
||||
border-radius: 5px;
|
||||
}
|
||||
}
|
||||
|
||||
.tab-pane-inner {
|
||||
&:not(:target) {
|
||||
display: none;
|
||||
}
|
||||
@@ -555,7 +611,12 @@ $form-edge-padding: 20px;
|
||||
}
|
||||
// doesn't need padding because there's another row of buttons/activity
|
||||
padding: 0px;
|
||||
}
|
||||
}
|
||||
|
||||
.edit-form {
|
||||
min-width: 70%;
|
||||
/* so it cant overflow */
|
||||
max-width: 95%;
|
||||
.box-wrap {
|
||||
position: relative;
|
||||
}
|
||||
@@ -567,5 +628,39 @@ $form-edge-padding: 20px;
|
||||
display: block;
|
||||
background: #fff;
|
||||
}
|
||||
|
||||
.pure-form-message-inline {
|
||||
padding-left: 0;
|
||||
}
|
||||
}
|
||||
|
||||
ul {
|
||||
padding-left: 1em;
|
||||
padding-top: 0px;
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
.time-check-widget {
|
||||
tr {
|
||||
display: inline;
|
||||
input[type="number"] {
|
||||
width: 5em;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#webdriver-override-options {
|
||||
input[type="number"] {
|
||||
width: 5em;
|
||||
}
|
||||
}
|
||||
|
||||
#api-key {
|
||||
&:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
}
|
||||
|
||||
#api-key-copy {
|
||||
color: #0078e7;
|
||||
}
|
||||
|
||||
@@ -1,76 +1,46 @@
|
||||
from os import unlink, path, mkdir
|
||||
from flask import (
|
||||
flash
|
||||
)
|
||||
import json
|
||||
import uuid as uuid_builder
|
||||
from threading import Lock
|
||||
from copy import deepcopy
|
||||
|
||||
import logging
|
||||
import time
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import uuid as uuid_builder
|
||||
from copy import deepcopy
|
||||
from os import mkdir, path, unlink
|
||||
from threading import Lock
|
||||
import re
|
||||
import requests
|
||||
import secrets
|
||||
|
||||
from . model import App, Watch
|
||||
|
||||
# Is there an existing library to ensure some data store (JSON etc) is in sync with CRUD methods?
|
||||
# Open a github issue if you know something :)
|
||||
# https://stackoverflow.com/questions/6190468/how-to-trigger-function-on-value-change
|
||||
class ChangeDetectionStore:
|
||||
lock = Lock()
|
||||
# For general updates/writes that can wait a few seconds
|
||||
needs_write = False
|
||||
|
||||
# For when we edit, we should write to disk
|
||||
needs_write_urgent = False
|
||||
|
||||
def __init__(self, datastore_path="/datastore", include_default_watches=True, version_tag="0.0.0"):
|
||||
# Should only be active for docker
|
||||
# logging.basicConfig(filename='/dev/stdout', level=logging.INFO)
|
||||
self.needs_write = False
|
||||
self.datastore_path = datastore_path
|
||||
self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
|
||||
self.proxy_list = None
|
||||
self.stop_thread = False
|
||||
|
||||
self.__data = {
|
||||
'note': "Hello! If you change this file manually, please be sure to restart your changedetection.io instance!",
|
||||
'watching': {},
|
||||
'settings': {
|
||||
'headers': {
|
||||
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36',
|
||||
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
|
||||
'Accept-Encoding': 'gzip, deflate', # No support for brotli in python requests yet.
|
||||
'Accept-Language': 'en-GB,en-US;q=0.9,en;'
|
||||
},
|
||||
'requests': {
|
||||
'timeout': 15, # Default 15 seconds
|
||||
'minutes_between_check': 3 * 60, # Default 3 hours
|
||||
'workers': 10 # Number of threads, lower is better for slow connections
|
||||
},
|
||||
'application': {
|
||||
'password': False,
|
||||
'extract_title_as_title': False,
|
||||
'fetch_backend': 'html_requests',
|
||||
'notification_urls': [], # Apprise URL list
|
||||
# Custom notification content
|
||||
'notification_title': 'ChangeDetection.io Notification - {watch_url}',
|
||||
'notification_body': '{watch_url} had a change.'
|
||||
}
|
||||
}
|
||||
}
|
||||
self.__data = App.model()
|
||||
|
||||
# Base definition for all watchers
|
||||
self.generic_definition = {
|
||||
'url': None,
|
||||
'tag': None,
|
||||
'last_checked': 0,
|
||||
'last_changed': 0,
|
||||
'paused': False,
|
||||
'last_viewed': 0, # history key value of the last viewed via the [diff] link
|
||||
'newest_history_key': "",
|
||||
'title': None,
|
||||
# Re #110, so then if this is set to None, we know to use the default value instead
|
||||
# Requires setting to None on submit if it's the same as the default
|
||||
'minutes_between_check': None,
|
||||
'previous_md5': "",
|
||||
'uuid': str(uuid_builder.uuid4()),
|
||||
'headers': {}, # Extra headers to send
|
||||
'history': {}, # Dict of timestamp and output stripped filename
|
||||
'ignore_text': [], # List of text to ignore when calculating the comparison checksum
|
||||
'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
|
||||
'css_filter': "",
|
||||
'trigger_text': [], # List of text or regex to wait for until a change is detected
|
||||
'fetch_backend': None,
|
||||
}
|
||||
# deepcopy part of #569 - not sure why it's needed exactly
|
||||
self.generic_definition = deepcopy(Watch.model())
|
||||
|
||||
if path.isfile('changedetectionio/source.txt'):
|
||||
with open('changedetectionio/source.txt') as f:
|
||||
@@ -118,7 +88,7 @@ class ChangeDetectionStore:
|
||||
self.add_watch(url='http://www.quotationspage.com/random.php', tag='test')
|
||||
self.add_watch(url='https://news.ycombinator.com/', tag='Tech news')
|
||||
self.add_watch(url='https://www.gov.uk/coronavirus', tag='Covid')
|
||||
self.add_watch(url='https://changedetection.io', tag='Tech news')
|
||||
self.add_watch(url='https://changedetection.io/CHANGELOG.txt')
|
||||
|
||||
self.__data['version_tag'] = version_tag
|
||||
|
||||
@@ -129,13 +99,33 @@ class ChangeDetectionStore:
|
||||
unlink(password_reset_lockfile)
|
||||
|
||||
if not 'app_guid' in self.__data:
|
||||
import sys
|
||||
import os
|
||||
import sys
|
||||
if "pytest" in sys.modules or "PYTEST_CURRENT_TEST" in os.environ:
|
||||
self.__data['app_guid'] = "test-" + str(uuid_builder.uuid4())
|
||||
else:
|
||||
self.__data['app_guid'] = str(uuid_builder.uuid4())
|
||||
|
||||
# Generate the URL access token for RSS feeds
|
||||
if not 'rss_access_token' in self.__data['settings']['application']:
|
||||
secret = secrets.token_hex(16)
|
||||
self.__data['settings']['application']['rss_access_token'] = secret
|
||||
|
||||
# Generate the API access token
|
||||
if not 'api_access_token' in self.__data['settings']['application']:
|
||||
secret = secrets.token_hex(16)
|
||||
self.__data['settings']['application']['api_access_token'] = secret
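For reference, secrets.token_hex(16) returns a 32-character hex string; a minimal standalone check (illustration only):

import secrets

token = secrets.token_hex(16)
print(len(token))   # 32
print(token)        # e.g. 'a3f1...' - random on every run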
|
||||
|
||||
# Proxy list support - available as a selection in settings when text file is imported
|
||||
# CSV list
|
||||
# "name, address", or just "name"
|
||||
proxy_list_file = "{}/proxies.txt".format(self.datastore_path)
|
||||
if path.isfile(proxy_list_file):
|
||||
self.import_proxy_list(proxy_list_file)
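As an illustration of the CSV format described above ("name, address", or just "name"), a proxies.txt dropped into the datastore path could look like the following; the names and addresses here are made up:

proxy-de, http://192.168.1.10:3128
proxy-us, http://192.168.1.11:3128
local-squid

import_proxy_list() turns each row into a (name, address) tuple; a row containing only a name is stored as (name, name).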
|
||||
|
||||
# Bump the update version by running updates
|
||||
self.run_updates()
|
||||
|
||||
self.needs_write = True
|
||||
|
||||
# Finally start the thread that will manage periodic data saves to JSON
|
||||
@@ -148,6 +138,7 @@ class ChangeDetectionStore:
|
||||
|
||||
dates = list(self.__data['watching'][uuid]['history'].keys())
|
||||
# Convert to int, sort and back to str again
|
||||
# @todo replace datastore getter that does this automatically
|
||||
dates = [int(i) for i in dates]
|
||||
dates.sort(reverse=True)
|
||||
if len(dates):
|
||||
@@ -160,10 +151,14 @@ class ChangeDetectionStore:
|
||||
self.data['watching'][uuid].update({'last_viewed': int(timestamp)})
|
||||
self.needs_write = True
|
||||
|
||||
def remove_password(self):
|
||||
self.__data['settings']['application']['password'] = False
|
||||
self.needs_write = True
|
||||
|
||||
def update_watch(self, uuid, update_obj):
|
||||
|
||||
# Skip if 'paused' state
|
||||
if self.__data['watching'][uuid]['paused']:
|
||||
# It's possible that the watch could be deleted before update
|
||||
if not self.__data['watching'].get(uuid):
|
||||
return
|
||||
|
||||
with self.lock:
|
||||
@@ -180,6 +175,17 @@ class ChangeDetectionStore:
|
||||
|
||||
self.needs_write = True
|
||||
|
||||
@property
|
||||
def threshold_seconds(self):
|
||||
seconds = 0
|
||||
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
|
||||
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
|
||||
for m, n in mtable.items():
|
||||
x = self.__data['settings']['requests']['time_between_check'].get(m)
|
||||
if x:
|
||||
seconds += x * n
|
||||
return max(seconds, minimum_seconds_recheck_time)
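A quick worked example of the conversion above, using hypothetical settings:

# time_between_check = {'weeks': None, 'days': 1, 'hours': 6, 'minutes': None, 'seconds': None}
# seconds = 1 * 86400 + 6 * 3600 = 108000
# threshold_seconds = max(108000, MINIMUM_SECONDS_RECHECK_TIME) = 108000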
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
has_unviewed = False
|
||||
@@ -193,13 +199,15 @@ class ChangeDetectionStore:
|
||||
has_unviewed = True
|
||||
|
||||
# #106 - Be sure this is None on empty string, False, None, etc
|
||||
if not self.__data['watching'][uuid]['title']:
|
||||
self.__data['watching'][uuid]['title'] = None
|
||||
|
||||
# Default var for fetch_backend
|
||||
if not self.__data['watching'][uuid]['fetch_backend']:
|
||||
self.__data['watching'][uuid]['fetch_backend'] = self.__data['settings']['application']['fetch_backend']
|
||||
|
||||
# Re #152, Return env base_url if not overriden, @todo also prefer the proxy pass url
|
||||
env_base_url = os.getenv('BASE_URL','')
|
||||
if not self.__data['settings']['application']['base_url']:
|
||||
self.__data['settings']['application']['base_url'] = env_base_url.strip('" ')
|
||||
|
||||
self.__data['has_unviewed'] = has_unviewed
|
||||
|
||||
return self.__data
|
||||
@@ -207,7 +215,8 @@ class ChangeDetectionStore:
|
||||
def get_all_tags(self):
|
||||
tags = []
|
||||
for uuid, watch in self.data['watching'].items():
|
||||
|
||||
if watch['tag'] is None:
|
||||
continue
|
||||
# Support for comma separated list of tags.
|
||||
for tag in watch['tag'].split(','):
|
||||
tag = tag.strip()
|
||||
@@ -240,7 +249,15 @@ class ChangeDetectionStore:
|
||||
|
||||
del self.data['watching'][uuid]
|
||||
|
||||
self.needs_write = True
|
||||
self.needs_write_urgent = True
|
||||
|
||||
# Clone a watch by UUID
|
||||
def clone(self, uuid):
|
||||
url = self.data['watching'][uuid]['url']
|
||||
tag = self.data['watching'][uuid]['tag']
|
||||
extras = self.data['watching'][uuid]
|
||||
new_uuid = self.add_watch(url=url, tag=tag, extras=extras)
|
||||
return new_uuid
|
||||
|
||||
def url_exists(self, url):
|
||||
|
||||
@@ -256,59 +273,68 @@ class ChangeDetectionStore:
|
||||
return self.data['watching'][uuid].get(val)
|
||||
|
||||
# Remove a watchs data but keep the entry (URL etc)
|
||||
def scrub_watch(self, uuid, limit_timestamp = False):
|
||||
def scrub_watch(self, uuid):
|
||||
import pathlib
|
||||
|
||||
import hashlib
|
||||
del_timestamps = []
|
||||
self.__data['watching'][uuid].update({'history': {}, 'last_checked': 0, 'last_changed': 0, 'newest_history_key': 0, 'previous_md5': False})
|
||||
self.needs_write_urgent = True
|
||||
|
||||
changes_removed = 0
|
||||
for item in pathlib.Path(self.datastore_path).rglob(uuid+"/*.txt"):
|
||||
unlink(item)
|
||||
|
||||
for timestamp, path in self.data['watching'][uuid]['history'].items():
|
||||
if not limit_timestamp or (limit_timestamp is not False and int(timestamp) > limit_timestamp):
|
||||
self.unlink_history_file(path)
|
||||
del_timestamps.append(timestamp)
|
||||
changes_removed += 1
|
||||
def add_watch(self, url, tag="", extras=None, write_to_disk_now=True):
|
||||
if extras is None:
|
||||
extras = {}
|
||||
# should always be str
|
||||
if tag is None or not tag:
|
||||
tag=''
|
||||
|
||||
if not limit_timestamp:
|
||||
self.data['watching'][uuid]['last_checked'] = 0
|
||||
self.data['watching'][uuid]['last_changed'] = 0
|
||||
self.data['watching'][uuid]['previous_md5'] = 0
|
||||
# In case these are copied across, assume it's a reference and deepcopy()
|
||||
apply_extras = deepcopy(extras)
|
||||
|
||||
# Was it a share link? try to fetch the data
|
||||
if (url.startswith("https://changedetection.io/share/")):
|
||||
try:
|
||||
r = requests.request(method="GET",
|
||||
url=url,
|
||||
# So we know to return the JSON instead of the human-friendly "help" page
|
||||
headers={'App-Guid': self.__data['app_guid']})
|
||||
res = r.json()
|
||||
|
||||
for timestamp in del_timestamps:
|
||||
del self.data['watching'][uuid]['history'][str(timestamp)]
|
||||
# List of permissible stuff we accept from the wild internet
|
||||
for k in ['url', 'tag',
|
||||
'paused', 'title',
|
||||
'previous_md5', 'headers',
|
||||
'body', 'method',
|
||||
'ignore_text', 'css_filter',
|
||||
'subtractive_selectors', 'trigger_text',
|
||||
'extract_title_as_title']:
|
||||
if res.get(k):
|
||||
apply_extras[k] = res[k]
|
||||
|
||||
# If there was a limitstamp, we need to reset some meta data about the entry
|
||||
# This has to happen after we remove the others from the list
|
||||
if limit_timestamp:
|
||||
newest_key = self.get_newest_history_key(uuid)
|
||||
if newest_key:
|
||||
self.data['watching'][uuid]['last_checked'] = int(newest_key)
|
||||
# @todo should be the original value if it was less than newest key
|
||||
self.data['watching'][uuid]['last_changed'] = int(newest_key)
|
||||
try:
|
||||
with open(self.data['watching'][uuid]['history'][str(newest_key)], "rb") as fp:
|
||||
content = fp.read()
|
||||
self.data['watching'][uuid]['previous_md5'] = hashlib.md5(content).hexdigest()
|
||||
except (FileNotFoundError, IOError):
|
||||
self.data['watching'][uuid]['previous_md5'] = False
|
||||
pass
|
||||
except Exception as e:
|
||||
logging.error("Error fetching metadata for shared watch link", url, str(e))
|
||||
flash("Error fetching metadata for {}".format(url), 'error')
|
||||
return False
|
||||
|
||||
self.needs_write = True
|
||||
return changes_removed
|
||||
|
||||
def add_watch(self, url, tag):
|
||||
with self.lock:
|
||||
# @todo use a common generic version of this
|
||||
new_uuid = str(uuid_builder.uuid4())
|
||||
_blank = deepcopy(self.generic_definition)
|
||||
_blank.update({
|
||||
# #Re 569
|
||||
# Not sure why deepcopy was needed here, sometimes new watches would appear to already have 'history' set
|
||||
# I assumed this would instantiate a new object but somehow an existing dict was getting used
|
||||
new_watch = deepcopy(Watch.model({
|
||||
'url': url,
|
||||
'tag': tag,
|
||||
'uuid': new_uuid
|
||||
})
|
||||
'tag': tag
|
||||
}))
|
||||
|
||||
self.data['watching'][new_uuid] = _blank
|
||||
|
||||
for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
|
||||
if k in apply_extras:
|
||||
del apply_extras[k]
|
||||
|
||||
new_watch.update(apply_extras)
|
||||
self.__data['watching'][new_uuid]=new_watch
|
||||
|
||||
# Get the directory ready
|
||||
output_path = "{}/{}".format(self.datastore_path, new_uuid)
|
||||
@@ -317,7 +343,8 @@ class ChangeDetectionStore:
|
||||
except FileExistsError:
|
||||
print(output_path, "already exists.")
|
||||
|
||||
self.sync_to_json()
|
||||
if write_to_disk_now:
|
||||
self.sync_to_json()
|
||||
return new_uuid
|
||||
|
||||
# Save some text file to the appropriate path and bump the history
|
||||
@@ -326,6 +353,10 @@ class ChangeDetectionStore:
|
||||
import uuid
|
||||
|
||||
output_path = "{}/{}".format(self.datastore_path, watch_uuid)
|
||||
# In case the operator deleted it, check and create.
|
||||
if not os.path.isdir(output_path):
|
||||
mkdir(output_path)
|
||||
|
||||
fname = "{}/{}.stripped.txt".format(output_path, uuid.uuid4())
|
||||
with open(fname, 'wb') as f:
|
||||
f.write(contents)
|
||||
@@ -333,23 +364,47 @@ class ChangeDetectionStore:
|
||||
|
||||
return fname
|
||||
|
||||
def sync_to_json(self):
|
||||
print("Saving..")
|
||||
data ={}
|
||||
def get_screenshot(self, watch_uuid):
|
||||
output_path = "{}/{}".format(self.datastore_path, watch_uuid)
|
||||
fname = "{}/last-screenshot.png".format(output_path)
|
||||
if path.isfile(fname):
|
||||
return fname
|
||||
|
||||
return False
|
||||
|
||||
# Save as PNG, PNG is larger but better for doing visual diff in the future
|
||||
def save_screenshot(self, watch_uuid, screenshot: bytes):
|
||||
output_path = "{}/{}".format(self.datastore_path, watch_uuid)
|
||||
fname = "{}/last-screenshot.png".format(output_path)
|
||||
with open(fname, 'wb') as f:
|
||||
f.write(screenshot)
|
||||
f.close()
|
||||
|
||||
def sync_to_json(self):
|
||||
logging.info("Saving JSON..")
|
||||
print("Saving JSON..")
|
||||
try:
|
||||
data = deepcopy(self.__data)
|
||||
except RuntimeError:
|
||||
time.sleep(0.5)
|
||||
print ("! Data changed when writing to JSON, trying again..")
|
||||
except RuntimeError as e:
|
||||
# Try again in 15 seconds
|
||||
time.sleep(15)
|
||||
logging.error ("! Data changed when writing to JSON, trying again.. %s", str(e))
|
||||
self.sync_to_json()
|
||||
return
|
||||
else:
|
||||
with open(self.json_store_path, 'w') as json_file:
|
||||
json.dump(data, json_file, indent=4)
|
||||
logging.info("Re-saved index")
|
||||
|
||||
try:
|
||||
# Re #286 - First write to a temp file, then confirm it looks OK and rename it
|
||||
# This is a fairly basic strategy to deal with the case that the file is corrupted,
|
||||
# system was out of memory, out of RAM etc
|
||||
with open(self.json_store_path+".tmp", 'w') as json_file:
|
||||
json.dump(data, json_file, indent=4)
|
||||
os.replace(self.json_store_path+".tmp", self.json_store_path)
|
||||
except Exception as e:
|
||||
logging.error("Error writing JSON!! (Main JSON file save was skipped) : %s", str(e))
|
||||
|
||||
self.needs_write = False
|
||||
self.needs_write_urgent = False
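The Re #286 comment above talks about confirming the temp file looks OK before the rename; the code as committed relies on json.dump() either completing or raising. A minimal sketch of an explicit read-back check (an illustration only, not part of this change) could be:

import json, os

def write_json_atomically(data, json_store_path):
    tmp_path = json_store_path + ".tmp"
    with open(tmp_path, 'w') as f:
        json.dump(data, f, indent=4)
    # Read the temp file back - a truncated or corrupt file raises here
    with open(tmp_path) as f:
        json.load(f)
    # os.replace() is atomic on the same filesystem - readers see the old or the new file, never a partial one
    os.replace(tmp_path, json_store_path)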
|
||||
|
||||
# Thread runner, this helps with thread/write issues when there are many operations that want to update the JSON
|
||||
# by just running periodically in one thread, according to python, dict updates are threadsafe.
|
||||
@@ -360,9 +415,15 @@ class ChangeDetectionStore:
|
||||
print("Shutting down datastore thread")
|
||||
return
|
||||
|
||||
if self.needs_write:
|
||||
if self.needs_write or self.needs_write_urgent:
|
||||
self.sync_to_json()
|
||||
time.sleep(3)
|
||||
|
||||
# Once per minute is enough, checking more often can cause high CPU usage
|
||||
# better here would be to use something like self.app.config.exit.wait(1), but we can't get to 'app' from here
|
||||
for i in range(120):
|
||||
time.sleep(0.5)
|
||||
if self.stop_thread or self.needs_write_urgent:
|
||||
break
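The comment above suggests an event-style wait rather than polling; a rough sketch of that idea, assuming the store owned its own threading.Event (which it currently does not), might look like:

from threading import Event

wake_up = Event()  # hypothetical: set by the app on shutdown or when an urgent write is queued

# Sleep up to 60 seconds, but wake immediately if wake_up.set() is called elsewhere
if wake_up.wait(timeout=60):
    wake_up.clear()  # shutdown or urgent write requested, handle it now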
|
||||
|
||||
# Go through the datastore path and remove any snapshots that are not mentioned in the index
|
||||
# This usually is not used, but can be handy.
|
||||
@@ -375,8 +436,71 @@ class ChangeDetectionStore:
|
||||
index.append(self.data['watching'][uuid]['history'][str(id)])
|
||||
|
||||
import pathlib
|
||||
|
||||
# Only in the sub-directories
|
||||
for item in pathlib.Path(self.datastore_path).rglob("*/*txt"):
|
||||
if not str(item) in index:
|
||||
print ("Removing",item)
|
||||
unlink(item)
|
||||
for uuid in self.data['watching']:
|
||||
for item in pathlib.Path(self.datastore_path).rglob(uuid+"/*.txt"):
|
||||
if not str(item) in index:
|
||||
print ("Removing",item)
|
||||
unlink(item)
|
||||
|
||||
def import_proxy_list(self, filename):
|
||||
import csv
|
||||
with open(filename, newline='') as f:
|
||||
reader = csv.reader(f, skipinitialspace=True)
|
||||
# @todo This loop could be improved
|
||||
l = []
|
||||
for row in reader:
|
||||
if len(row):
|
||||
if len(row)>=2:
|
||||
l.append(tuple(row[:2]))
|
||||
else:
|
||||
l.append(tuple([row[0], row[0]]))
|
||||
self.proxy_list = l if len(l) else None
|
||||
|
||||
|
||||
# Run all updates
|
||||
# IMPORTANT - Each update could be run even when they have a new install and the schema is correct
|
||||
# So therefore - each `update_n` should be very careful about checking if it needs to actually run
|
||||
# Probably we should bump the current update schema version with each tag release version?
|
||||
def run_updates(self):
|
||||
import inspect
|
||||
import shutil
|
||||
|
||||
updates_available = []
|
||||
for i, o in inspect.getmembers(self, predicate=inspect.ismethod):
|
||||
m = re.search(r'update_(\d+)$', i)
|
||||
if m:
|
||||
updates_available.append(int(m.group(1)))
|
||||
updates_available.sort()
|
||||
|
||||
for update_n in updates_available:
|
||||
if update_n > self.__data['settings']['application']['schema_version']:
|
||||
print ("Applying update_{}".format((update_n)))
|
||||
# Won't exist on fresh installs
|
||||
if os.path.exists(self.json_store_path):
|
||||
shutil.copyfile(self.json_store_path, self.datastore_path+"/url-watches-before-{}.json".format(update_n))
|
||||
|
||||
try:
|
||||
update_method = getattr(self, "update_{}".format(update_n))()
|
||||
except Exception as e:
|
||||
print("Error while trying update_{}".format((update_n)))
|
||||
print(e)
|
||||
# Don't run any more updates
|
||||
return
|
||||
else:
|
||||
# Bump the version, important
|
||||
self.__data['settings']['application']['schema_version'] = update_n
|
||||
|
||||
# Convert minutes to seconds on settings and each watch
|
||||
def update_1(self):
|
||||
if self.data['settings']['requests'].get('minutes_between_check'):
|
||||
self.data['settings']['requests']['time_between_check']['minutes'] = self.data['settings']['requests']['minutes_between_check']
|
||||
# Remove the default 'hours' that is set from the model
|
||||
self.data['settings']['requests']['time_between_check']['hours'] = None
|
||||
|
||||
for uuid, watch in self.data['watching'].items():
|
||||
if 'minutes_between_check' in watch:
|
||||
# Only upgrade individual watch time if it was set
|
||||
if watch.get('minutes_between_check', False):
|
||||
self.data['watching'][uuid]['time_between_check']['minutes'] = watch['minutes_between_check']
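Following the same pattern, a later migration (update_2 here is purely hypothetical) would guard itself so that it is a no-op on fresh installs where the schema is already correct:

# Hypothetical example only - shows the "check before you change anything" guard
def update_2(self):
    for uuid, watch in self.data['watching'].items():
        # Only migrate watches that still carry the legacy key
        if 'some_legacy_key' in watch:
            self.data['watching'][uuid]['some_new_key'] = watch.pop('some_legacy_key')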
|
||||
|
||||
100
changedetectionio/templates/_common_fields.jinja
Normal file
@@ -0,0 +1,100 @@
|
||||
|
||||
{% from '_helpers.jinja' import render_field %}
|
||||
|
||||
{% macro render_common_settings_form(form, current_base_url, emailprefix) %}
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_urls, rows=5, placeholder="Examples:
|
||||
Gitter - gitter://token/room
|
||||
Office365 - o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail
|
||||
AWS SNS - sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo
|
||||
SMTPS - mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com", class="notification-urls")
|
||||
}}
|
||||
<div class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Use <a target=_new href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.</li>
|
||||
<li><code>discord://</code> only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
|
||||
<li><code>tgram://</code> bots can't send messages to other bots, so you should specify the chat ID of a non-bot user.</li>
|
||||
<li>Go here for <a href="{{url_for('notification_logs')}}">notification debug logs</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<br/>
|
||||
<a id="send-test-notification" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Send test notification</a>
|
||||
{% if emailprefix %}
|
||||
<a id="add-email-helper" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Add email</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div id="notification-customisation" class="pure-control-group">
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_title, class="m-d notification-title") }}
|
||||
<span class="pure-form-message-inline">Title for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_body , rows=5, class="notification-body") }}
|
||||
<span class="pure-form-message-inline">Body for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_format , rows=5, class="notification-format") }}
|
||||
<span class="pure-form-message-inline">Format for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-controls">
|
||||
<span class="pure-form-message-inline">
|
||||
These tokens can be used in the notification body and title to customise the notification text.
|
||||
|
||||
<table class="pure-table" id="token-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Token</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><code>{base_url}</code></td>
|
||||
<td>The URL of the changedetection.io instance you are running.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{watch_url}</code></td>
|
||||
<td>The URL being watched.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{watch_uuid}</code></td>
|
||||
<td>The UUID of the watch.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{watch_title}</code></td>
|
||||
<td>The title of the watch.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{watch_tag}</code></td>
|
||||
<td>The tag of the watch.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{preview_url}</code></td>
|
||||
<td>The URL of the preview page generated by changedetection.io.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{diff}</code></td>
|
||||
<td>The diff output - differences only</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{diff_full}</code></td>
|
||||
<td>The diff output - full difference output</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{diff_url}</code></td>
|
||||
<td>The URL of the diff page generated by changedetection.io.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{current_snapshot}</code></td>
|
||||
<td>The current snapshot value, useful when combined with JSON or CSS filters
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<br/>
|
||||
URLs generated by changedetection.io (such as <code>{diff_url}</code>) require the <code>BASE_URL</code> environment variable to be set.<br/>
|
||||
Your <code>BASE_URL</code> var is currently "{{current_base_url}}"
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
{% endmacro %}
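As a concrete illustration of the tokens above, the default title and body used elsewhere in this changeset are 'ChangeDetection.io Notification - {watch_url}' and '{watch_url} had a change.'; a customised body combining several tokens (purely an example) might read:

{watch_title} changed at {watch_url} - full diff: {diff_url}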
|
||||
@@ -1,3 +1,30 @@
|
||||
{% macro render_field(field) %}
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
|
||||
|
||||
{% if field.errors %}
|
||||
<ul class=errors>
|
||||
{% for error in field.errors %}
|
||||
<li>{{ error }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{% macro render_checkbox_field(field) %}
|
||||
<div class="checkbox {% if field.errors %} error {% endif %}">
|
||||
{{ field(**kwargs)|safe }} {{ field.label }}
|
||||
{% if field.errors %}
|
||||
<ul class=errors>
|
||||
{% for error in field.errors %}
|
||||
<li>{{ error }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{% macro render_field(field) %}
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
||||
@@ -22,4 +49,9 @@
|
||||
</ul>
|
||||
{% endif %}
|
||||
</span>
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro render_button(field) %}
|
||||
{{ field(**kwargs)|safe }}
|
||||
{% endmacro %}
|
||||
@@ -5,6 +5,7 @@
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta name="description" content="Self hosted website change detection.">
|
||||
<title>Change Detection{{extra_title}}</title>
|
||||
<link rel="alternate" type="application/rss+xml" title="Changedetection.io » Feed{% if active_tag %}- {{active_tag}}{% endif %}" href="{{ url_for('rss', tag=active_tag , token=app_rss_token)}}" />
|
||||
<link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='pure-min.css')}}">
|
||||
<link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='styles.css')}}">
|
||||
{% if extra_stylesheets %}
|
||||
@@ -12,7 +13,15 @@
|
||||
<link rel="stylesheet" href="{{ m }}?ver=1000">
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
<style>
|
||||
body::before {
|
||||
background-image: url({{url_for('static_content', group='images', filename='gradient-border.png')}});
|
||||
}
|
||||
</style>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<div class="header">
|
||||
@@ -35,13 +44,13 @@
|
||||
{% if current_user.is_authenticated or not has_password %}
|
||||
{% if not current_diff_url %}
|
||||
<li class="pure-menu-item">
|
||||
<a href="{{ url_for('get_backup')}}" class="pure-menu-link">BACKUP</a>
|
||||
<a href="{{ url_for('settings_page')}}" class="pure-menu-link">SETTINGS</a>
|
||||
</li>
|
||||
<li class="pure-menu-item">
|
||||
<a href="{{ url_for('import_page')}}" class="pure-menu-link">IMPORT</a>
|
||||
</li>
|
||||
<li class="pure-menu-item">
|
||||
<a href="{{ url_for('settings_page')}}" class="pure-menu-link">SETTINGS</a>
|
||||
<a href="{{ url_for('get_backup')}}" class="pure-menu-link">BACKUP</a>
|
||||
</li>
|
||||
{% else %}
|
||||
<li class="pure-menu-item">
|
||||
@@ -68,7 +77,7 @@
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if hosted_sticky %}<div class="sticky-tab" id="hosted-sticky"><a href="https://lemonade.changedetection.io/start?ref={{guid}}">Let us host your instance!</a></div>{% endif %}
|
||||
{% if left_sticky %}<div class="sticky-tab" id="left-sticky"><a href="{{url_for('preview_page', uuid=uuid)}}">Show current snapshot</a></div> {% endif %}
|
||||
{% if right_sticky %}<div class="sticky-tab" id="right-sticky">{{ right_sticky }}</div> {% endif %}
|
||||
<section class="content">
|
||||
@@ -85,6 +94,13 @@
|
||||
</ul>
|
||||
{% endif %}
|
||||
{% endwith %}
|
||||
|
||||
{% if session['share-link'] %}
|
||||
<ul class="messages with-share-link">
|
||||
<li class="message">Share this link: <span id="share-link">{{ session['share-link'] }}</span> <img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='copy.svg')}}" /></li>
|
||||
</ul>
|
||||
{% endif %}
|
||||
|
||||
{% block content %}
|
||||
|
||||
{% endblock %}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
{% extends 'base.html' %}
|
||||
|
||||
{% block content %}
|
||||
|
||||
<div id="settings">
|
||||
<h1>Differences</h1>
|
||||
<form class="pure-form " action="" method="GET">
|
||||
@@ -35,20 +34,45 @@
|
||||
<div id="diff-jump">
|
||||
<a onclick="next_diff();">Jump</a>
|
||||
</div>
|
||||
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<div class="tabs">
|
||||
<ul>
|
||||
<li class="tab" id="default-tab"><a href="#text">Text</a></li>
|
||||
{% if screenshot %}
|
||||
<li class="tab"><a href="#screenshot">Current screenshot</a></li>
|
||||
{% endif %}
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div id="diff-ui">
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<!-- just proof of concept copied straight from github.com/kpdecker/jsdiff -->
|
||||
<td id="a" style="display: none;">{{previous}}</td>
|
||||
<td id="b" style="display: none;">{{newest}}</td>
|
||||
<td id="diff-col">
|
||||
<span id="result"></span>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
Diff algorithm from the amazing <a href="https://github.com/kpdecker/jsdiff">github.com/kpdecker/jsdiff</a>
|
||||
<div class="tab-pane-inner" id="text">
|
||||
<div class="tip">Pro-tip: Use <strong>show current snapshot</strong> tab to visualise what will be ignored.
|
||||
</div>
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<!-- just proof of concept copied straight from github.com/kpdecker/jsdiff -->
|
||||
<td id="a" style="display: none;">{{previous}}</td>
|
||||
<td id="b" style="display: none;">{{newest}}</td>
|
||||
<td id="diff-col">
|
||||
<span id="result"></span>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
Diff algorithm from the amazing <a href="https://github.com/kpdecker/jsdiff">github.com/kpdecker/jsdiff</a>
|
||||
</div>
|
||||
|
||||
{% if screenshot %}
|
||||
<div class="tab-pane-inner" id="screenshot">
|
||||
<p>
|
||||
<i>For now, only the most recent screenshot is saved and displayed.</i>
|
||||
</p>
|
||||
|
||||
<img src="{{url_for('static_content', group='screenshot', filename=uuid)}}">
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
@@ -1,27 +1,38 @@
|
||||
{% extends 'base.html' %}
|
||||
{% block content %}
|
||||
{% from '_helpers.jinja' import render_field %}
|
||||
{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
|
||||
{% from '_common_fields.jinja' import render_common_settings_form %}
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<script>
|
||||
const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
|
||||
{% if emailprefix %}
|
||||
const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
|
||||
{% endif %}
|
||||
</script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>
|
||||
|
||||
<div class="edit-form monospaced-textarea">
|
||||
|
||||
<div class="tabs">
|
||||
<div class="tabs collapsable">
|
||||
<ul>
|
||||
<li class="tab" id="default-tab"><a href="#general">General</a></li>
|
||||
<li class="tab"><a href="#request">Request</a></li>
|
||||
<li class="tab"><a href="#filters-and-triggers">Filters & Triggers</a></li>
|
||||
<li class="tab"><a href="#notifications">Notifications</a></li>
|
||||
<li class="tab"><a href="#filters">Filters</a></li>
|
||||
<li class="tab"><a href="#triggers">Triggers</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="box-wrap inner">
|
||||
<form class="pure-form pure-form-stacked"
|
||||
action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next') ) }}" method="POST">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
||||
|
||||
<div class="tab-pane-inner" id="general">
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.url, placeholder="https://...", required=true, class="m-d") }}
|
||||
<span class="pure-form-message-inline">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.title, class="m-d") }}
|
||||
@@ -31,8 +42,8 @@
|
||||
<span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.minutes_between_check) }}
|
||||
{% if using_default_minutes %}
|
||||
{{ render_field(form.time_between_check, class="time-check-widget") }}
|
||||
{% if has_empty_checktime %}
|
||||
<span class="pure-form-message-inline">Currently using the <a
|
||||
href="{{ url_for('settings_page', uuid=uuid) }}">default global settings</a>, change to another value if you want to be specific.</span>
|
||||
{% else %}
|
||||
@@ -40,90 +51,158 @@
|
||||
href="{{ url_for('settings_page', uuid=uuid) }}">default global settings</a>.</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
<fieldset class="pure-group">
|
||||
{{ render_field(form.headers, rows=5, placeholder="Example
|
||||
Cookie: foobar
|
||||
User-Agent: wonderbra 1.0") }}
|
||||
<span class="pure-form-message-inline">
|
||||
Note: ONLY used by Basic fast Plaintext/HTTP Client
|
||||
</span>
|
||||
</fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.fetch_backend) }}
|
||||
{{ render_checkbox_field(form.extract_title_as_title) }}
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="request">
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_field(form.fetch_backend, class="fetch-backend") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<p>Use the <strong>Basic</strong> method (default) where your watched sites don't need Javascript to render.</p>
|
||||
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server. </p>
|
||||
<p>Use the <strong>Basic</strong> method (default) where your watched site doesn't need Javascript to render.</p>
|
||||
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'. </p>
|
||||
</span>
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
<div class="tab-pane-inner" id="notifications">
|
||||
<fieldset>
|
||||
{% if form.proxy %}
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_field(form.proxy, class="fetch-backend-proxy") }}
|
||||
<span class="pure-form-message-inline">
|
||||
Choose a proxy for this watch
|
||||
</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
<fieldset id="webdriver-override-options">
|
||||
<div class="pure-form-message-inline">
|
||||
<strong>If you're having trouble waiting for the page to be fully rendered (text missing etc), try increasing the 'wait' time here.</strong>
|
||||
<br/>
|
||||
This will wait <i>n</i> seconds before extracting the text.
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_urls, rows=5, placeholder="Examples:
|
||||
Gitter - gitter://token/room
|
||||
Office365 - o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail
|
||||
AWS SNS - sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo
|
||||
SMTPS - mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com
|
||||
") }}
|
||||
<span class="pure-form-message-inline">Use <a target=_new
|
||||
href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service!</span>
|
||||
<span class="pure-form-message-inline">Note: This overrides any global settings notification URLs</span>
|
||||
{{ render_field(form.webdriver_delay) }}
|
||||
</div>
|
||||
{% if using_global_webdriver_wait %}
|
||||
<div class="pure-form-message-inline">
|
||||
<strong>Using the current global default settings</strong>
|
||||
</div>
|
||||
{% endif %}
|
||||
</fieldset>
|
||||
<fieldset class="pure-group" id="requests-override-options">
|
||||
<div class="pure-form-message-inline">
|
||||
<strong>Request override is currently only used by the <i>Basic fast Plaintext/HTTP Client</i> method.</strong>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.method) }}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.headers, rows=5, placeholder="Example
|
||||
Cookie: foobar
|
||||
User-Agent: wonderbra 1.0") }}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.body, rows=5, placeholder="Example
|
||||
{
|
||||
\"name\":\"John\",
|
||||
\"age\":30,
|
||||
\"car\":null
|
||||
}") }}
|
||||
</div>
|
||||
<div>
|
||||
{{ render_checkbox_field(form.ignore_status_codes) }}
|
||||
</div>
|
||||
</fieldset>
|
||||
<br/>
|
||||
</div>
|
||||
|
||||
<div class="pure-controls">
|
||||
{{ render_field(form.trigger_check, rows=5) }}
|
||||
<div class="tab-pane-inner" id="notifications">
|
||||
<strong>Note: <i>These settings override the global settings for this watch.</i></strong>
|
||||
<fieldset>
|
||||
<div class="field-group">
|
||||
{{ render_common_settings_form(form, current_base_url, emailprefix) }}
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
<div class="tab-pane-inner" id="filters">
|
||||
<fieldset>
|
||||
|
||||
<div class="tab-pane-inner" id="filters-and-triggers">
|
||||
<div class="pure-control-group">
|
||||
<strong>Pro-tips:</strong><br/>
|
||||
<ul>
|
||||
<li>
|
||||
Use the preview page to see your filters and triggers highlighted.
|
||||
</li>
|
||||
<li>
|
||||
Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.css_filter, placeholder=".class-name or #some-id, or other CSS selector rule.",
|
||||
class="m-d") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
|
||||
<li>JSON - Limit text to this JSON rule, using <a href="https://pypi.org/project/jsonpath-ng/">JSONPath</a>, prefix with <b>"json:"</b>, <a
|
||||
<li>JSON - Limit text to this JSON rule, using <a href="https://pypi.org/project/jsonpath-ng/">JSONPath</a>, prefix with <code>"json:"</code>, use <code>json:$</code> to force re-formatting if required, <a
|
||||
href="https://jsonpath.com/" target="new">test your JSONPath here</a></li>
|
||||
<li>XPath - Limit text to this XPath rule, simply start with a forward-slash, example <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
|
||||
href="http://xpather.com/" target="new">test your XPath here</a></li>
|
||||
</ul>
|
||||
Please be sure that you thoroughly understand how to write CSS or JSONPath selector rules before filing an issue on GitHub! <a
|
||||
Please be sure that you thoroughly understand how to write CSS, JSONPath or XPath selector rules before filing an issue on GitHub! <a
|
||||
href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br/>
|
||||
</span>
|
||||
</div>
|
||||
|
||||
</fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.subtractive_selectors, rows=5, placeholder="header
|
||||
footer
|
||||
nav
|
||||
.stockticker") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li> Remove HTML element(s) by CSS selector before text conversion. </li>
|
||||
<li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li>
|
||||
</ul>
|
||||
</span>
|
||||
</div>
|
||||
<fieldset class="pure-group">
|
||||
{{ render_field(form.ignore_text, rows=5, placeholder="Some text to ignore in a line
|
||||
/some.regex\d{2}/ for case-INsensitive regex
|
||||
") }}
|
||||
<span class="pure-form-message-inline">
|
||||
Each line processed separately, any line matching will be ignored.<br/>
|
||||
Regular Expression support, wrap the line in forward slash <b>/regex/</b>.
|
||||
<ul>
|
||||
<li>Each line processed separately, any line matching will be ignored (removed before creating the checksum)</li>
|
||||
<li>Regular Expression support, wrap the line in forward slash <code>/regex/</code></li>
|
||||
<li>Changing this will affect the comparison checksum which may trigger an alert</li>
|
||||
<li>Use the preview/show current tab to see ignores</li>
|
||||
</ul>
|
||||
</span>
|
||||
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="triggers">
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.trigger_text, rows=5, placeholder="Some text to wait for in a line
|
||||
/some.regex\d{2}/ for case-INsensitive regex
|
||||
") }}</br>
|
||||
<span class="pure-form-message-inline">Text to wait for before triggering a change/notification, all text and regex are tested <i>case-insensitive</i>.</span><br/>
|
||||
<span class="pure-form-message-inline">Trigger text is processed from the result-text that comes out of any <a href="#filters">CSS/JSON Filters</a> for this watch</span>.<br/>
|
||||
<span class="pure-form-message-inline">Each line is process separately (think of each line as "OR")</span><br/>
|
||||
<span class="pure-form-message-inline">Note: Wrap in forward slash / to use regex example: <span style="font-family: monospace; background: #eee">/foo\d/</span> </span>
|
||||
") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Text to wait for before triggering a change/notification, all text and regex are tested <i>case-insensitive</i>.</li>
|
||||
<li>Trigger text is processed from the result-text that comes out of any CSS/JSON Filters for this watch</li>
|
||||
<li>Each line is processed separately (think of each line as "OR")</li>
|
||||
<li>Note: Wrap in forward slash / to use regex example: <code>/foo\d/</code></li>
|
||||
</ul>
|
||||
</span>
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div id="actions">
|
||||
<div class="pure-control-group">
|
||||
|
||||
<button type="submit" class="pure-button pure-button-primary">Save</button>
|
||||
<a href="{{url_for('api_delete', uuid=uuid)}}"
|
||||
{{ render_button(form.save_button) }} {{ render_button(form.save_and_preview_button) }}
|
||||
|
||||
<a href="{{url_for('form_delete', uuid=uuid)}}"
|
||||
class="pure-button button-small button-error ">Delete</a>
|
||||
<a href="{{url_for('form_clone', uuid=uuid)}}"
|
||||
class="pure-button button-small ">Create Copy</a>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
@@ -1,23 +1,86 @@
|
||||
{% extends 'base.html' %}
|
||||
|
||||
{% block content %}
|
||||
<div class="edit-form">
|
||||
<div class="inner">
|
||||
<form class="pure-form pure-form-aligned" action="{{url_for('import_page')}}" method="POST">
|
||||
<fieldset class="pure-group">
|
||||
<legend>One URL per line, URLs that do not pass validation will stay in the textarea.</legend>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<div class="edit-form monospaced-textarea">
|
||||
|
||||
<textarea name="urls" class="pure-input-1-2" placeholder="https://"
|
||||
style="width: 100%;
|
||||
<div class="tabs collapsable">
|
||||
<ul>
|
||||
<li class="tab" id="default-tab"><a href="#url-list">URL List</a></li>
|
||||
<li class="tab"><a href="#distill-io">Distill.io</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="box-wrap inner">
|
||||
<form class="pure-form pure-form-aligned" action="{{url_for('import_page')}}" method="POST">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
||||
<div class="tab-pane-inner" id="url-list">
|
||||
<fieldset class="pure-group">
|
||||
<legend>
|
||||
Enter one URL per line, and optionally add tags for each URL after a space, separated by commas
|
||||
(,):
|
||||
<br>
|
||||
<code>https://example.com tag1, tag2, last tag</code>
|
||||
<br>
|
||||
URLs which do not pass validation will stay in the textarea.
|
||||
</legend>
|
||||
|
||||
|
||||
<textarea name="urls" class="pure-input-1-2" placeholder="https://"
|
||||
style="width: 100%;
|
||||
font-family:monospace;
|
||||
white-space: pre;
|
||||
overflow-wrap: normal;
|
||||
overflow-x: scroll;" rows="25">{{ remaining }}</textarea>
|
||||
</fieldset>
|
||||
overflow-x: scroll;" rows="25">{{ import_url_list_remaining }}</textarea>
|
||||
</fieldset>
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="distill-io">
|
||||
|
||||
|
||||
<fieldset class="pure-group">
|
||||
<legend>
|
||||
Copy and paste your Distill.io watch 'export' file, this should be a JSON file.<br/>
|
||||
This is <i>experimental</i>, supported fields are <code>name</code>, <code>uri</code>, <code>tags</code>, <code>config:selections</code>, the rest (including <code>schedule</code>) are ignored.
|
||||
<br/>
|
||||
<p>
|
||||
How to export? <a href="https://distill.io/docs/web-monitor/how-export-and-import-monitors/">https://distill.io/docs/web-monitor/how-export-and-import-monitors/</a><br/>
|
||||
Be sure to set your default fetcher to Chrome if required.<br/>
|
||||
</p>
|
||||
</legend>
|
||||
|
||||
|
||||
<textarea name="distill-io" class="pure-input-1-2" style="width: 100%;
|
||||
font-family:monospace;
|
||||
white-space: pre;
|
||||
overflow-wrap: normal;
|
||||
overflow-x: scroll;" placeholder="Example Distill.io JSON export file
|
||||
|
||||
{
|
||||
"client": {
|
||||
"local": 1
|
||||
},
|
||||
"data": [
|
||||
{
|
||||
"name": "Unraid | News",
|
||||
"uri": "https://unraid.net/blog",
|
||||
"config": "{\"selections\":[{\"frames\":[{\"index\":0,\"excludes\":[],\"includes\":[{\"type\":\"xpath\",\"expr\":\"(//div[@id='App']/div[contains(@class,'flex')]/main[contains(@class,'relative')]/section[contains(@class,'relative')]/div[@class='container']/div[contains(@class,'flex')]/div[contains(@class,'w-full')])[1]\"}]}],\"dynamic\":true,\"delay\":2}],\"ignoreEmptyText\":true,\"includeStyle\":false,\"dataAttr\":\"text\"}",
|
||||
"tags": [],
|
||||
"content_type": 2,
|
||||
"state": 40,
|
||||
"schedule": "{\"type\":\"INTERVAL\",\"params\":{\"interval\":4447}}",
|
||||
"ts": "2022-03-27T15:51:15.667Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
" rows="25">{{ original_distill_json }}</textarea>
|
||||
</fieldset>
|
||||
</div>
|
||||
<button type="submit" class="pure-button pure-input-1-2 pure-button-primary">Import</button>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
||||
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
{% extends 'base.html' %}
|
||||
|
||||
{% block content %}
|
||||
<div class="edit-form">
|
||||
|
||||
<div class="login-form">
|
||||
<div class="inner">
|
||||
<form class="pure-form pure-form-stacked" action="{{url_for('login')}}" method="POST">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
<label for="password">Password</label>
|
||||
<input type="password" id="password" required="" name="password" value=""
|
||||
size="15"/>
|
||||
size="15" autofocus />
|
||||
<input type="hidden" id="email" name="email" value="defaultuser@changedetection.io" />
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
|
||||
19
changedetectionio/templates/notification-log.html
Normal file
@@ -0,0 +1,19 @@
|
||||
{% extends 'base.html' %}
|
||||
|
||||
{% block content %}
|
||||
<div class="edit-form">
|
||||
<div class="inner">
|
||||
|
||||
<h4 style="margin-top: 0px;">The following issues were detected when sending notifications</h4>
|
||||
<div id="notification-error-log">
|
||||
<ul style="font-size: 80%; margin:0px; padding: 0 0 0 7px">
|
||||
{% for log in logs|reverse %}
|
||||
<li>{{log}}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
||||
@@ -3,24 +3,43 @@
|
||||
{% block content %}
|
||||
|
||||
<div id="settings">
|
||||
<h1>Current</h1>
|
||||
<h1>Current - {{watch.last_checked|format_timestamp_timeago}}</h1>
|
||||
</div>
|
||||
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<div class="tabs">
|
||||
<ul>
|
||||
<li class="tab" id="default-tab"><a href="#text">Text</a></li>
|
||||
{% if screenshot %}
|
||||
<li class="tab"><a href="#screenshot">Current screenshot</a></li>
|
||||
{% endif %}
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div id="diff-ui">
|
||||
<div class="tab-pane-inner" id="text">
|
||||
<span class="ignored">Grey lines are ignored</span> <span class="triggered">Blue lines are triggers</span>
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td id="diff-col">
|
||||
{% for row in content %}
|
||||
<div class="{{row.classes}}">{{row.line}}</div>
|
||||
{% endfor %}
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<!-- just proof of concept copied straight from github.com/kpdecker/jsdiff -->
|
||||
{% if screenshot %}
|
||||
<div class="tab-pane-inner" id="screenshot">
|
||||
<p>
|
||||
<i>For now, only the most recent screenshot is saved and displayed.</i>
|
||||
</p>
|
||||
|
||||
<td id="diff-col">
|
||||
<span id="result">{% for row in content %}<pre>{{row}}</pre>{% endfor %}</span>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<img src="{{url_for('static_content', group='screenshot', filename=uuid)}}">
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
|
||||
{% endblock %}
|
||||
@@ -2,10 +2,12 @@

{% block content %}
<div class="edit-form">
    <div class="box-wrap inner">
        <form class="pure-form pure-form-stacked" action="{{url_for('scrub_page')}}" method="POST">
            <input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
            <fieldset>
                <div class="pure-control-group">
                    This will remove all version snapshots/data, but keep your list of URLs. <br/>
                    This will remove ALL version snapshots/data, but keep your list of URLs. <br/>
                    You may like to use the <strong>BACKUP</strong> link first.<br/>
                </div>
                <br/>
@@ -15,12 +17,6 @@
                    <span class="pure-form-message-inline">Type in the word <strong>scrub</strong> to confirm that you understand!</span>
                </div>
                <br/>
                <div class="pure-control-group">
                    <label for="confirmtext">Optional: Limit deletion of snapshots to snapshots <i>newer</i> than date/time</label>
                    <input type="datetime-local" id="limit_date" name="limit_date" />
                    <span class="pure-form-message-inline">dd/mm/yyyy hh:mm (24 hour format)</span>
                </div>
                <br/>
                <div class="pure-control-group">
                    <button type="submit" class="pure-button pure-button-primary">Scrub!</button>
                </div>
@@ -30,6 +26,7 @@
                </div>
            </fieldset>
        </form>
    </div>
</div>

{% endblock %}
@@ -1,135 +1,176 @@
|
||||
{% extends 'base.html' %}
|
||||
|
||||
{% block content %}
|
||||
{% from '_helpers.jinja' import render_field %}
|
||||
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='settings.js')}}" defer></script>
|
||||
{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
|
||||
{% from '_common_fields.jinja' import render_common_settings_form %}
|
||||
<script>
|
||||
const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
|
||||
{% if emailprefix %}
|
||||
const email_notification_prefix=JSON.parse('{{emailprefix|tojson}}');
|
||||
{% endif %}
|
||||
</script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>
|
||||
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='global-settings.js')}}" defer></script>
|
||||
<div class="edit-form">
|
||||
<div class="tabs">
|
||||
<div class="tabs collapsable">
|
||||
<ul>
|
||||
<li class="tab" id="default-tab"><a href="#general">General</a></li>
|
||||
<li class="tab"><a href="#notifications">Notifications</a></li>
|
||||
<li class="tab"><a href="#fetching">Fetching</a></li>
|
||||
<li class="tab"><a href="#filters">Global Filters</a></li>
|
||||
<li class="tab"><a href="#api">API</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="box-wrap inner">
|
||||
<form class="pure-form pure-form-stacked settings" action="{{url_for('settings_page')}}" method="POST">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
||||
<div class="tab-pane-inner" id="general">
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.minutes_between_check) }}
|
||||
{{ render_field(form.requests.form.time_between_check, class="time-check-widget") }}
|
||||
<span class="pure-form-message-inline">Default time for all watches, when the watch does not have a specific time setting.</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{% if current_user.is_authenticated %}
|
||||
<a href="{{url_for('settings_page', removepassword='yes')}}"
|
||||
class="pure-button pure-button-primary">Remove password</a>
|
||||
{% if not hide_remove_pass %}
|
||||
{% if current_user.is_authenticated %}
|
||||
{{ render_button(form.application.form.removepassword_button) }}
|
||||
{% else %}
|
||||
{{ render_field(form.application.form.password) }}
|
||||
<span class="pure-form-message-inline">Password protection for your changedetection.io application.</span>
|
||||
{% endif %}
|
||||
{% else %}
|
||||
{{ render_field(form.password) }}
|
||||
<span class="pure-form-message-inline">Password protection for your changedetection.io application.</span>
|
||||
<span class="pure-form-message-inline">Password is locked.</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.extract_title_as_title) }}
|
||||
{{ render_field(form.application.form.base_url, placeholder="http://yoursite.com:5000/",
|
||||
class="m-d") }}
|
||||
<span class="pure-form-message-inline">
|
||||
Base URL used for the {base_url} token in notifications and RSS links.<br/>Default value is the ENV var 'BASE_URL' (Currently "{{current_base_url}}"),
|
||||
<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Configurable-BASE_URL-setting">read more here</a>.
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div class="pure-control-group">
|
||||
{{ render_checkbox_field(form.application.form.extract_title_as_title) }}
|
||||
<span class="pure-form-message-inline">Note: This will automatically apply to all existing watches.</span>
|
||||
</div>
|
||||
|
||||
<div class="pure-control-group">
|
||||
{{ render_checkbox_field(form.application.form.real_browser_save_screenshot) }}
|
||||
<span class="pure-form-message-inline">When using a Chrome browser, a screenshot from the last check will be available on the Diff page</span>
|
||||
</div>
|
||||
|
||||
<div class="pure-control-group">
|
||||
{{ render_checkbox_field(form.application.form.empty_pages_are_a_change) }}
|
||||
<span class="pure-form-message-inline">When a page contains HTML, but no renderable text appears (empty page), is this considered a change?</span>
|
||||
</div>
|
||||
{% if form.requests.proxy %}
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_field(form.requests.form.proxy, class="fetch-backend-proxy") }}
|
||||
<span class="pure-form-message-inline">
|
||||
Choose a default proxy for all watches
|
||||
</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="notifications">
|
||||
<fieldset>
|
||||
<div class="field-group">
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_urls, rows=5, placeholder="Examples:
|
||||
Gitter - gitter://token/room
|
||||
Office365 - o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail
|
||||
AWS SNS - sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo
|
||||
SMTPS - mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com")
|
||||
}}
|
||||
<div class="pure-form-message-inline">Use <a target=_new
|
||||
href="https://github.com/caronc/apprise">AppRise
|
||||
URLs</a> for notification to just about any service!
|
||||
<a id="toggle-customise-notifications">Customise notification body: <i
|
||||
class="arrow down"></i></a>
|
||||
</div>
|
||||
</div>
|
||||
<div id="notification-customisation" style="display:none;">
|
||||
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_title, class="m-d") }}
|
||||
<span class="pure-form-message-inline">Title for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_body , rows=5) }}
|
||||
<span class="pure-form-message-inline">Body for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-controls">
|
||||
<span class="pure-form-message-inline">
|
||||
These tokens can be used in the notification body and title to
|
||||
customise the notification text.
|
||||
</span>
|
||||
<table class="pure-table" id="token-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Token</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><code>{base_url}</code></td>
|
||||
<td>The URL of the changedetection.io instance you are running.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{watch_url}</code></td>
|
||||
<td>The URL being watched.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{preview_url}</code></td>
|
||||
<td>The URL of the preview page generated by changedetection.io.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{diff_url}</code></td>
|
||||
<td>The URL of the diff page generated by changedetection.io.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{current_snapshot}</code></td>
|
||||
<td>The current snapshot value, useful when combined with JSON or CSS filters
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<span class="pure-form-message-inline">
|
||||
URLs generated by changedetection.io (such as <code>{diff_url}</code>) require the <code>BASE_URL</code> environment variable set.<br/>
|
||||
Your <code>BASE_URL</code> var is currently "{{base_url}}"
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.trigger_check) }}
|
||||
</div>
|
||||
{{ render_common_settings_form(form.application.form, current_base_url, emailprefix) }}
|
||||
</div>
|
||||
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="fetching">
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.fetch_backend) }}
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_field(form.application.form.fetch_backend, class="fetch-backend") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<p>Use the <strong>Basic</strong> method (default) where your watched sites don't need Javascript to render.</p>
|
||||
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server. </p>
|
||||
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'. </p>
|
||||
</span>
|
||||
</div>
|
||||
<fieldset class="pure-group" id="webdriver-override-options">
|
||||
<div class="pure-form-message-inline">
|
||||
<strong>If you're having trouble waiting for the page to be fully rendered (text missing etc), try increasing the 'wait' time here.</strong>
|
||||
<br/>
|
||||
This will wait <i>n</i> seconds before extracting the text.
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.application.form.webdriver_delay) }}
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="filters">
|
||||
|
||||
<fieldset class="pure-group">
|
||||
{{ render_checkbox_field(form.application.form.ignore_whitespace) }}
|
||||
<span class="pure-form-message-inline">Ignore whitespace, tabs and new-lines/line-feeds when considering if a change was detected.<br/>
|
||||
<i>Note:</i> Changing this will change the status of your existing watches, possibly trigger alerts etc.
|
||||
</span>
|
||||
</fieldset>
|
||||
<fieldset class="pure-group">
|
||||
{{ render_checkbox_field(form.application.form.render_anchor_tag_content) }}
|
||||
<span class="pure-form-message-inline">Render anchor tag content, default disabled, when enabled renders links as <code>(link text)[https://somesite.com]</code>
|
||||
<br/>
|
||||
<i>Note:</i> Changing this could affect the content of your existing watches, possibly trigger alerts etc.
|
||||
</span>
|
||||
</fieldset>
|
||||
<fieldset class="pure-group">
|
||||
{{ render_field(form.application.form.global_subtractive_selectors, rows=5, placeholder="header
|
||||
footer
|
||||
nav
|
||||
.stockticker") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li> Remove HTML element(s) by CSS selector before text conversion. </li>
|
||||
<li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li>
|
||||
</ul>
|
||||
</span>
|
||||
</fieldset>
|
||||
<fieldset class="pure-group">
|
||||
{{ render_field(form.application.form.global_ignore_text, rows=5, placeholder="Some text to ignore in a line
|
||||
/some.regex\d{2}/ for case-INsensitive regex
|
||||
") }}
|
||||
<span class="pure-form-message-inline">Note: This is applied globally in addition to the per-watch rules.</span><br/>
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Note: This is applied globally in addition to the per-watch rules.</li>
|
||||
<li>Each line processed separately, any line matching will be ignored (removed before creating the checksum)</li>
|
||||
<li>Regular Expression support, wrap the line in forward slash <code>/regex/</code></li>
|
||||
<li>Changing this will affect the comparison checksum which may trigger an alert</li>
|
||||
<li>Use the preview/show current tab to see ignores</li>
|
||||
</ul>
|
||||
</span>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="api">
|
||||
|
||||
<p>Drive your changedetection.io via API, More about <a href="https://github.com/dgtlmoon/changedetection.io/wiki/API-Reference">API access here</a></p>
|
||||
|
||||
<div class="pure-control-group">
|
||||
{{ render_checkbox_field(form.application.form.api_access_token_enabled) }}
|
||||
<div class="pure-form-message-inline">Restrict API access limit by using <code>x-api-key</code> header</div><br/>
|
||||
<div class="pure-form-message-inline"><br/>API Key <span id="api-key">{{api_key}}</span>
|
||||
<span style="display:none;" id="api-key-copy" >copy</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="actions">
|
||||
<div class="pure-control-group">
|
||||
<button type="submit" class="pure-button pure-button-primary">Save</button>
|
||||
<a href="{{url_for('index')}}" class="pure-button button-small button-cancel">Back</a>
|
||||
<a href="{{url_for('scrub_page')}}" class="pure-button button-small button-cancel">Delete
|
||||
History
|
||||
Snapshot Data</a>
|
||||
{{ render_button(form.save_button) }}
|
||||
<a href="{{url_for('index')}}" class="pure-button button-small button-cancel">Back</a>
|
||||
<a href="{{url_for('scrub_page')}}" class="pure-button button-small button-cancel">Delete History Snapshot Data</a>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
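The token table in the Notifications tab above lists the placeholders that can appear in the notification title and body. A minimal sketch of the substitution idea, using made-up values (the real rendering happens server-side when a change fires):

# Hypothetical values; the token names are the ones listed in the table above.
tokens = {
    "base_url": "http://yoursite.com:5000",
    "watch_url": "https://unraid.net/blog",
    "preview_url": "http://yoursite.com:5000/preview/some-uuid",
    "diff_url": "http://yoursite.com:5000/diff/some-uuid",
    "current_snapshot": "(first lines of the newly fetched text)",
}

title = "Change detected for {watch_url}".format(**tokens)
body = "View the difference: {diff_url}\nPreview the page: {preview_url}".format(**tokens)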
@@ -1,18 +1,19 @@
|
||||
{% extends 'base.html' %}
|
||||
{% block content %}
|
||||
{% from '_helpers.jinja' import render_simple_field %}
|
||||
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script>
|
||||
<div class="box">
|
||||
|
||||
<form class="pure-form" action="{{ url_for('api_watch_add') }}" method="POST" id="new-watch-form">
|
||||
<form class="pure-form" action="{{ url_for('form_watch_add') }}" method="POST" id="new-watch-form">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
||||
<fieldset>
|
||||
<legend>Add a new change detection watch</legend>
|
||||
{{ render_simple_field(form.url, placeholder="https://...", required=true) }}
|
||||
{{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="tag") }}
|
||||
{{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="watch group") }}
|
||||
<button type="submit" class="pure-button pure-button-primary">Watch</button>
|
||||
</fieldset>
|
||||
<!-- add extra stuff, like do a http POST and send headers -->
|
||||
<!-- user/pass r = requests.get('https://api.github.com/user', auth=('user', 'pass')) -->
|
||||
<span style="color:#eee; font-size: 80%;"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" /> Tip: You can also add 'shared' watches. <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Sharing-a-Watch">More info</a></a></span>
|
||||
</form>
|
||||
<div>
|
||||
<a href="{{url_for('index')}}" class="pure-button button-tag {{'active' if not active_tag }}">All</a>
|
||||
@@ -42,18 +43,25 @@
|
||||
<tr id="{{ watch.uuid }}"
|
||||
class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }}
|
||||
{% if watch.last_error is defined and watch.last_error != False %}error{% endif %}
|
||||
{% if watch.last_notification_error is defined and watch.last_notification_error != False %}error{% endif %}
|
||||
{% if watch.paused is defined and watch.paused != False %}paused{% endif %}
|
||||
{% if watch.newest_history_key| int > watch.last_viewed| int %}unviewed{% endif %}">
|
||||
{% if watch.newest_history_key| int > watch.last_viewed| int %}unviewed{% endif %}
|
||||
{% if watch.uuid in queued_uuids %}queued{% endif %}">
|
||||
<td class="inline">{{ loop.index }}</td>
|
||||
<td class="inline paused-state state-{{watch.paused}}"><a href="{{url_for('index', pause=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause" title="Pause"/></a></td>
|
||||
|
||||
<td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
|
||||
<a class="external" target="_blank" rel="noopener" href="{{ watch.url }}"></a>
|
||||
{%if watch.fetch_backend == "html_webdriver" %}<img style="height: 1em; display:inline-block;" src="/static/images/Google-Chrome-icon.png" />{% endif %}
|
||||
<a class="external" target="_blank" rel="noopener" href="{{ watch.url.replace('source:','') }}"></a>
|
||||
<a href="{{url_for('form_share_put_watch', uuid=watch.uuid)}}"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" /></a>
|
||||
|
||||
{%if watch.fetch_backend == "html_webdriver" %}<img style="height: 1em; display:inline-block;" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" />{% endif %}
|
||||
|
||||
{% if watch.last_error is defined and watch.last_error != False %}
|
||||
<div class="fetch-error">{{ watch.last_error }}</div>
|
||||
{% endif %}
|
||||
{% if watch.last_notification_error is defined and watch.last_notification_error != False %}
|
||||
<div class="fetch-error notification-error">{{ watch.last_notification_error }}</div>
|
||||
{% endif %}
|
||||
{% if not active_tag %}
|
||||
<span class="watch-tag-list">{{ watch.tag}}</span>
|
||||
{% endif %}
|
||||
@@ -66,11 +74,11 @@
|
||||
{% endif %}
|
||||
</td>
|
||||
<td>
|
||||
<a href="{{ url_for('api_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}"
|
||||
class="pure-button button-small pure-button-primary">Recheck</a>
|
||||
<a {% if watch.uuid in queued_uuids %}disabled="true"{% endif %} href="{{ url_for('form_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}"
|
||||
class="recheck pure-button button-small pure-button-primary">{% if watch.uuid in queued_uuids %}Queued{% else %}Recheck{% endif %}</a>
|
||||
<a href="{{ url_for('edit_page', uuid=watch.uuid)}}" class="pure-button button-small pure-button-primary">Edit</a>
|
||||
{% if watch.history|length >= 2 %}
|
||||
<a href="{{ url_for('diff_history_page', uuid=watch.uuid) }}" target="{{watch.uuid}}" class="pure-button button-small pure-button-primary">Diff</a>
|
||||
<a href="{{ url_for('diff_history_page', uuid=watch.uuid) }}" target="{{watch.uuid}}" class="pure-button button-small pure-button-primary diff-link">Diff</a>
|
||||
{% else %}
|
||||
{% if watch.history|length == 1 %}
|
||||
<a href="{{ url_for('preview_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button button-small pure-button-primary">Preview</a>
|
||||
@@ -88,13 +96,13 @@
|
||||
</li>
|
||||
{% endif %}
|
||||
<li>
|
||||
<a href="{{ url_for('api_watch_checknow', tag=active_tag) }}" class="pure-button button-tag ">Recheck
|
||||
<a href="{{ url_for('form_watch_checknow', tag=active_tag) }}" class="pure-button button-tag ">Recheck
|
||||
all {% if active_tag%}in "{{active_tag}}"{%endif%}</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="{{ url_for('index', tag=active_tag , rss=true)}}"><img id="feed-icon" src="{{url_for('static_content', group='images', filename='Generic_Feed-icon.svg')}}" height="15px"></a>
|
||||
<a href="{{ url_for('rss', tag=active_tag , token=app_rss_token)}}"><img alt="RSS Feed" id="feed-icon" src="{{url_for('static_content', group='images', filename='Generic_Feed-icon.svg')}}" height="15"></a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
|
||||
@@ -16,13 +16,14 @@ def cleanup(datastore_path):
    # Unlink test output files
    files = ['output.txt',
             'url-watches.json',
             'secret.txt',
             'notification.txt',
             'count.txt',
             'endpoint-content.txt']
             'endpoint-content.txt'
             ]
    for file in files:
        try:
            os.unlink("{}/{}".format(datastore_path, file))
            x = 1
        except FileNotFoundError:
            pass

@@ -36,15 +37,15 @@ def app(request):
    except FileExistsError:
        pass

    # Enable a BASE_URL for notifications to work (so we can look for diff/ etc URLs)
    os.environ["BASE_URL"] = "http://mysite.com/"
    cleanup(datastore_path)


    app_config = {'datastore_path': datastore_path}
    cleanup(app_config['datastore_path'])
    datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], include_default_watches=False)
    app = changedetection_app(app_config, datastore)

    # Disable CSRF while running tests
    app.config['WTF_CSRF_ENABLED'] = False
    app.config['STOP_THREADS'] = True

    def teardown():
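The hunk above ends at the fixture's teardown() definition. For reference, this is the usual pytest shape: the fixture builds the application once, registers a finalizer, and returns it to the tests. A generic sketch only, with placeholder helpers rather than this project's code:

import pytest

def create_test_app():
    # Placeholder factory, standing in for changedetection_app(app_config, datastore)
    return object()

def cleanup(datastore_path):
    # Placeholder for removing temporary datastore files after the run
    pass

@pytest.fixture(scope="session")
def app(request):
    test_app = create_test_app()
    # pytest calls the finalizer once the last test using this fixture has finished
    request.addfinalizer(lambda: cleanup("test-datastore"))
    return test_app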
@@ -1,20 +1,20 @@
|
||||
from flask import url_for
|
||||
|
||||
from . util import live_server_setup
|
||||
|
||||
def test_check_access_control(app, client):
|
||||
# Still doesnt work, but this is closer.
|
||||
|
||||
with app.test_client() as c:
|
||||
# Check we dont have any password protection enabled yet.
|
||||
with app.test_client(use_cookies=True) as c:
|
||||
# Check we don't have any password protection enabled yet.
|
||||
res = c.get(url_for("settings_page"))
|
||||
assert b"Remove password" not in res.data
|
||||
|
||||
# Enable password check.
|
||||
res = c.post(
|
||||
url_for("settings_page"),
|
||||
data={"password": "foobar",
|
||||
"minutes_between_check": 180,
|
||||
'fetch_backend': "html_requests"},
|
||||
data={"application-password": "foobar",
|
||||
"requests-time_between_check-minutes": 180,
|
||||
'application-fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
@@ -46,63 +46,34 @@ def test_check_access_control(app, client):
|
||||
assert b"BACKUP" in res.data
|
||||
assert b"IMPORT" in res.data
|
||||
assert b"LOG OUT" in res.data
|
||||
assert b"time_between_check-minutes" in res.data
|
||||
assert b"fetch_backend" in res.data
|
||||
|
||||
# Now remove the password so other tests function, @todo this should happen before each test automatically
|
||||
res = c.get(url_for("settings_page", removepassword="yes"),
|
||||
follow_redirects=True)
|
||||
##################################################
|
||||
# Remove password button, and check that it worked
|
||||
##################################################
|
||||
res = c.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-fetch_backend": "html_webdriver",
|
||||
"application-removepassword_button": "Remove password"
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
assert b"Password protection removed." in res.data
|
||||
|
||||
res = c.get(url_for("index"))
|
||||
assert b"LOG OUT" not in res.data
|
||||
|
||||
|
||||
# There was a bug where saving the settings form would submit a blank password
|
||||
def test_check_access_control_no_blank_password(app, client):
|
||||
# Still doesnt work, but this is closer.
|
||||
|
||||
with app.test_client() as c:
|
||||
# Check we dont have any password protection enabled yet.
|
||||
res = c.get(url_for("settings_page"))
|
||||
assert b"Remove password" not in res.data
|
||||
|
||||
# Enable password check.
|
||||
############################################################
|
||||
# Be sure a blank password doesnt setup password protection
|
||||
############################################################
|
||||
res = c.post(
|
||||
url_for("settings_page"),
|
||||
data={"password": "",
|
||||
"minutes_between_check": 180,
|
||||
'fetch_backend': "html_requests"},
|
||||
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Password protection enabled." not in res.data
|
||||
assert b"Login" not in res.data
|
||||
|
||||
|
||||
# There was a bug where saving the settings form would submit a blank password
|
||||
def test_check_access_no_remote_access_to_remove_password(app, client):
|
||||
# Still doesnt work, but this is closer.
|
||||
|
||||
with app.test_client() as c:
|
||||
# Check we dont have any password protection enabled yet.
|
||||
res = c.get(url_for("settings_page"))
|
||||
assert b"Remove password" not in res.data
|
||||
|
||||
# Enable password check.
|
||||
res = c.post(
|
||||
url_for("settings_page"),
|
||||
data={"password": "password", "minutes_between_check": 180,
|
||||
'fetch_backend': "html_requests"},
|
||||
data={"application-password": "",
|
||||
"requests-time_between_check-minutes": 180,
|
||||
'application-fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Password protection enabled." in res.data
|
||||
assert b"Login" in res.data
|
||||
assert b"Password protection enabled" not in res.data
|
||||
|
||||
res = c.get(url_for("settings_page", removepassword="yes"),
|
||||
follow_redirects=True)
|
||||
assert b"Password protection removed." not in res.data
|
||||
|
||||
res = c.get(url_for("index"),
|
||||
follow_redirects=True)
|
||||
assert b"watch-table-wrapper" not in res.data
|
||||
|
||||
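The settings POSTs in the access-control tests above changed from flat field names ("password", "minutes_between_check") to prefixed ones ("application-password", "requests-time_between_check-minutes"). That prefix is what WTForms generates when fields are grouped into sub-forms with FormField, as the settings template above does with form.application.form and form.requests.form. A small sketch of the naming behaviour (class and field names here are illustrative, not the project's exact form definitions):

from wtforms import Form, FormField, IntegerField, StringField

class RequestsSubForm(Form):
    time_between_check = IntegerField()

class ApplicationSubForm(Form):
    password = StringField()
    fetch_backend = StringField()

class SettingsForm(Form):
    requests = FormField(RequestsSubForm)
    application = FormField(ApplicationSubForm)

form = SettingsForm()
# FormField joins the outer name and the inner field name with "-" by default,
# which is why the tests now post keys like "application-password".
# Nesting another sub-form inside time_between_check would yield
# "requests-time_between_check-minutes", matching the test data above.
print(form.application.form.password.name)           # application-password
print(form.requests.form.time_between_check.name)    # requests-time_between_check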
206
changedetectionio/tests/test_api.py
Normal file
@@ -0,0 +1,206 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup
|
||||
|
||||
import json
|
||||
import uuid
|
||||
|
||||
|
||||
def set_original_response():
|
||||
test_return_data = """<html>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<p>Which is across multiple lines</p>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
<div id="sametext">Some text thats the same</div>
|
||||
<div id="changetext">Some text that will change</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
return None
|
||||
|
||||
|
||||
def set_modified_response():
|
||||
test_return_data = """<html>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<p>which has this one new line</p>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
<div id="sametext">Some text thats the same</div>
|
||||
<div id="changetext">Some text that changes</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def is_valid_uuid(val):
|
||||
try:
|
||||
uuid.UUID(str(val))
|
||||
return True
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
|
||||
# kinda funky, but works for now
|
||||
def _extract_api_key_from_UI(client):
|
||||
import re
|
||||
res = client.get(
|
||||
url_for("settings_page"),
|
||||
)
|
||||
# <span id="api-key">{{api_key}}</span>
|
||||
|
||||
m = re.search('<span id="api-key">(.+?)</span>', str(res.data))
|
||||
api_key = m.group(1)
|
||||
return api_key.strip()
|
||||
|
||||
|
||||
def test_api_simple(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
api_key = _extract_api_key_from_UI(client)
|
||||
|
||||
# Create a watch
|
||||
set_original_response()
|
||||
watch_uuid = None
|
||||
|
||||
# Validate bad URL
|
||||
test_url = url_for('test_endpoint', _external=True,
|
||||
headers={'x-api-key': api_key}, )
|
||||
res = client.post(
|
||||
url_for("createwatch"),
|
||||
data=json.dumps({"url": "h://xxxxxxxxxom"}),
|
||||
headers={'content-type': 'application/json', 'x-api-key': api_key},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert res.status_code == 400
|
||||
|
||||
# Create new
|
||||
res = client.post(
|
||||
url_for("createwatch"),
|
||||
data=json.dumps({"url": test_url, 'tag': "One, Two", "title": "My test URL"}),
|
||||
headers={'content-type': 'application/json', 'x-api-key': api_key},
|
||||
follow_redirects=True
|
||||
)
|
||||
s = json.loads(res.data)
|
||||
assert is_valid_uuid(s['uuid'])
|
||||
watch_uuid = s['uuid']
|
||||
assert res.status_code == 201
|
||||
|
||||
time.sleep(3)
|
||||
|
||||
# Verify its in the list and that recheck worked
|
||||
res = client.get(
|
||||
url_for("createwatch"),
|
||||
headers={'x-api-key': api_key}
|
||||
)
|
||||
assert watch_uuid in json.loads(res.data).keys()
|
||||
before_recheck_info = json.loads(res.data)[watch_uuid]
|
||||
assert before_recheck_info['last_checked'] != 0
|
||||
assert before_recheck_info['title'] == 'My test URL'
|
||||
|
||||
set_modified_response()
|
||||
# Trigger recheck of all ?recheck_all=1
|
||||
client.get(
|
||||
url_for("createwatch", recheck_all='1'),
|
||||
headers={'x-api-key': api_key},
|
||||
)
|
||||
time.sleep(3)
|
||||
|
||||
# Did the recheck fire?
|
||||
res = client.get(
|
||||
url_for("createwatch"),
|
||||
headers={'x-api-key': api_key},
|
||||
)
|
||||
after_recheck_info = json.loads(res.data)[watch_uuid]
|
||||
assert after_recheck_info['last_checked'] != before_recheck_info['last_checked']
|
||||
assert after_recheck_info['last_changed'] != 0
|
||||
|
||||
# Check history index list
|
||||
res = client.get(
|
||||
url_for("watchhistory", uuid=watch_uuid),
|
||||
headers={'x-api-key': api_key},
|
||||
)
|
||||
history = json.loads(res.data)
|
||||
assert len(history) == 2, "Should have two history entries (the original and the changed)"
|
||||
|
||||
# Fetch a snapshot by timestamp, check the right one was found
|
||||
res = client.get(
|
||||
url_for("watchsinglehistory", uuid=watch_uuid, timestamp=list(history.keys())[-1]),
|
||||
headers={'x-api-key': api_key},
|
||||
)
|
||||
assert b'which has this one new line' in res.data
|
||||
|
||||
# Fetch a snapshot by 'latest'', check the right one was found
|
||||
res = client.get(
|
||||
url_for("watchsinglehistory", uuid=watch_uuid, timestamp='latest'),
|
||||
headers={'x-api-key': api_key},
|
||||
)
|
||||
assert b'which has this one new line' in res.data
|
||||
|
||||
# Fetch the whole watch
|
||||
res = client.get(
|
||||
url_for("watch", uuid=watch_uuid),
|
||||
headers={'x-api-key': api_key}
|
||||
)
|
||||
watch = json.loads(res.data)
|
||||
# @todo how to handle None/default global values?
|
||||
assert watch['history_n'] == 2, "Found replacement history section, which is in its own API"
|
||||
|
||||
# Finally delete the watch
|
||||
res = client.delete(
|
||||
url_for("watch", uuid=watch_uuid),
|
||||
headers={'x-api-key': api_key},
|
||||
)
|
||||
assert res.status_code == 204
|
||||
|
||||
# Check via a relist
|
||||
res = client.get(
|
||||
url_for("createwatch"),
|
||||
headers={'x-api-key': api_key}
|
||||
)
|
||||
watch_list = json.loads(res.data)
|
||||
assert len(watch_list) == 0, "Watch list should be empty"
|
||||
|
||||
|
||||
def test_access_denied(client, live_server):
|
||||
# `config_api_token_enabled` Should be On by default
|
||||
res = client.get(
|
||||
url_for("createwatch")
|
||||
)
|
||||
assert res.status_code == 403
|
||||
|
||||
res = client.get(
|
||||
url_for("createwatch"),
|
||||
headers={'x-api-key': "something horrible"}
|
||||
)
|
||||
assert res.status_code == 403
|
||||
|
||||
# Disable config_api_token_enabled and it should work
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-fetch_backend": "html_requests",
|
||||
"application-api_access_token_enabled": ""
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
res = client.get(
|
||||
url_for("createwatch")
|
||||
)
|
||||
assert res.status_code == 200
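test_api.py above drives the new REST interface through Flask's test client. Called from outside the process, the same operations look roughly like this; a sketch that assumes the endpoints are mounted under /api/v1 as described on the API-Reference wiki page linked from the settings template, so adjust the base URL and key for your instance:

import requests

BASE = "http://localhost:5000/api/v1"                 # assumed mount point of the REST API
HEADERS = {"x-api-key": "your-api-key-from-settings"}

# Create a watch (mirrors the "createwatch" POST in the test above)
r = requests.post(BASE + "/watch",
                  json={"url": "https://example.com", "tag": "One, Two", "title": "My test URL"},
                  headers=HEADERS)
r.raise_for_status()
uuid = r.json()["uuid"]

# List all watches, fetch one watch, fetch its history index, then delete it
print(requests.get(BASE + "/watch", headers=HEADERS).json())
print(requests.get(BASE + "/watch/" + uuid, headers=HEADERS).json())
print(requests.get(BASE + "/watch/" + uuid + "/history", headers=HEADERS).json())
requests.delete(BASE + "/watch/" + uuid, headers=HEADERS)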
39
changedetectionio/tests/test_auth.py
Normal file
@@ -0,0 +1,39 @@
#!/usr/bin/python3

import time
from flask import url_for
from . util import live_server_setup

def test_basic_auth(client, live_server):

    live_server_setup(live_server)
    # Give the endpoint time to spin up
    time.sleep(1)

    # Add our URL to the import page
    test_url = url_for('test_basicauth_method', _external=True).replace("//","//myuser:mypass@")

    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    # Check form validation
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"css_filter": "", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

    # Trigger a check
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    time.sleep(1)
    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b'myuser mypass basic' in res.data
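test_basic_auth above builds a watch URL with credentials embedded in the authority part (https://myuser:mypass@host/...). One straightforward way a fetcher can honour that form is to split the credentials out with urllib and hand them to the HTTP client separately, so they travel as an Authorization header rather than in the URL (a generic sketch, not necessarily how the built-in fetchers handle it):

from urllib.parse import urlsplit, urlunsplit
import requests

url = "https://myuser:mypass@example.com/protected/page"
parts = urlsplit(url)

# Rebuild the network location without the user:pass@ prefix
host = parts.hostname + (":" + str(parts.port) if parts.port else "")
clean_url = urlunsplit((parts.scheme, host, parts.path, parts.query, parts.fragment))

# requests turns auth=(user, pass) into an HTTP Basic Authorization header
response = requests.get(clean_url, auth=(parts.username, parts.password))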
@@ -7,6 +7,13 @@ from . util import set_original_response, set_modified_response, live_server_set
|
||||
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
# Basic test to check inscriptus is not adding return line chars, basically works etc
|
||||
def test_inscriptus():
|
||||
from inscriptis import get_text
|
||||
html_content="<html><body>test!<br/>ok man</body></html>"
|
||||
stripped_text_from_html = get_text(html_content)
|
||||
assert stripped_text_from_html == 'test!\nok man'
|
||||
|
||||
|
||||
def test_check_basic_change_detection_functionality(client, live_server):
|
||||
set_original_response()
|
||||
@@ -18,13 +25,14 @@ def test_check_basic_change_detection_functionality(client, live_server):
|
||||
data={"urls": url_for('test_endpoint', _external=True)},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# Do this a few times.. ensures we dont accidently set the status
|
||||
for n in range(3):
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
@@ -42,6 +50,14 @@ def test_check_basic_change_detection_functionality(client, live_server):
|
||||
|
||||
#####################
|
||||
|
||||
# Check HTML conversion detected and workd
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
# Check this class does not appear (that we didnt see the actual source)
|
||||
assert b'foobar-detection' not in res.data
|
||||
|
||||
# Make a change
|
||||
set_modified_response()
|
||||
|
||||
@@ -49,8 +65,8 @@ def test_check_basic_change_detection_functionality(client, live_server):
|
||||
assert b'which has this one new line' in res.read()
|
||||
|
||||
# Force recheck
|
||||
res = client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
assert b'1 watches are rechecking.' in res.data
|
||||
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
assert b'1 watches are queued for rechecking.' in res.data
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
@@ -59,9 +75,14 @@ def test_check_basic_change_detection_functionality(client, live_server):
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
# #75, and it should be in the RSS feed
|
||||
res = client.get(url_for("index", rss="true"))
|
||||
res = client.get(url_for("rss"))
|
||||
expected_url = url_for('test_endpoint', _external=True)
|
||||
assert b'<rss' in res.data
|
||||
|
||||
# re #16 should have the diff in here too
|
||||
assert b'(into ) which has this one new line' in res.data
|
||||
assert b'CDATA' in res.data
|
||||
|
||||
assert expected_url.encode('utf-8') in res.data
|
||||
|
||||
# Following the 'diff' link, it should no longer display as 'unviewed' even after we recheck it a few times
|
||||
@@ -72,7 +93,7 @@ def test_check_basic_change_detection_functionality(client, live_server):
|
||||
|
||||
# Do this a few times.. ensures we dont accidently set the status
|
||||
for n in range(2):
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
@@ -88,11 +109,11 @@ def test_check_basic_change_detection_functionality(client, live_server):
|
||||
# Enable auto pickup of <title> in settings
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={"extract_title_as_title": "1", "minutes_between_check": 180, 'fetch_backend': "html_requests"},
|
||||
data={"application-extract_title_as_title": "1", "requests-time_between_check-minutes": 180, 'application-fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
@@ -102,6 +123,6 @@ def test_check_basic_change_detection_functionality(client, live_server):
|
||||
|
||||
#
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
|
||||
25
changedetectionio/tests/test_backup.py
Normal file
@@ -0,0 +1,25 @@
#!/usr/bin/python3

import time
from flask import url_for
from urllib.request import urlopen
from . util import set_original_response, set_modified_response, live_server_setup


def test_backup(client, live_server):

    live_server_setup(live_server)

    # Give the endpoint time to spin up
    time.sleep(1)

    res = client.get(
        url_for("get_backup"),
        follow_redirects=True
    )

    # Should get the right zip content type
    assert res.content_type == "application/zip"
    # Should be PK/ZIP stream
    assert res.data.count(b'PK') >= 2
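The backup test above checks the content type and counts PK markers in the body. If a stricter assertion is ever wanted, the response bytes can be opened as a real ZIP archive in memory with the standard library; the expected member names are not asserted here because they are not shown in this diff:

import io
import zipfile

def assert_valid_zip(payload: bytes):
    with zipfile.ZipFile(io.BytesIO(payload)) as zf:
        assert zf.testzip() is None       # no corrupt members
        assert len(zf.namelist()) > 0     # the archive actually contains files

# e.g. inside the test: assert_valid_zip(res.data)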
30
changedetectionio/tests/test_clone.py
Normal file
@@ -0,0 +1,30 @@
#!/usr/bin/python3

import time
from flask import url_for
from . util import live_server_setup



def test_trigger_functionality(client, live_server):

    live_server_setup(live_server)

    # Give the endpoint time to spin up
    time.sleep(1)

    # Add our URL to the import page
    res = client.post(
        url_for("import_page"),
        data={"urls": "https://changedetection.io"},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data


    res = client.get(
        url_for("form_clone", uuid="first"),
        follow_redirects=True
    )

    assert b"Cloned." in res.data
@@ -89,7 +89,7 @@ def test_check_markup_css_filter_restriction(client, live_server):
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
@@ -110,7 +110,7 @@ def test_check_markup_css_filter_restriction(client, live_server):
|
||||
assert bytes(css_filter.encode('utf-8')) in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
@@ -118,7 +118,7 @@ def test_check_markup_css_filter_restriction(client, live_server):
|
||||
set_modified_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
|
||||
168
changedetectionio/tests/test_element_removal.py
Normal file
@@ -0,0 +1,168 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import time
|
||||
|
||||
from flask import url_for
|
||||
|
||||
from ..html_tools import *
|
||||
from .util import live_server_setup
|
||||
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
|
||||
def set_original_response():
|
||||
test_return_data = """<html>
|
||||
<header>
|
||||
<h2>Header</h2>
|
||||
</header>
|
||||
<nav>
|
||||
<ul>
|
||||
<li><a href="#">A</a></li>
|
||||
<li><a href="#">B</a></li>
|
||||
<li><a href="#">C</a></li>
|
||||
</ul>
|
||||
</nav>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<p>Which is across multiple lines</p>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
<div id="changetext">Some text that will change</div>
|
||||
</body>
|
||||
<footer>
|
||||
<p>Footer</p>
|
||||
</footer>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
|
||||
def set_modified_response():
|
||||
test_return_data = """<html>
|
||||
<header>
|
||||
<h2>Header changed</h2>
|
||||
</header>
|
||||
<nav>
|
||||
<ul>
|
||||
<li><a href="#">A changed</a></li>
|
||||
<li><a href="#">B</a></li>
|
||||
<li><a href="#">C</a></li>
|
||||
</ul>
|
||||
</nav>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<p>Which is across multiple lines</p>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
<div id="changetext">Some text that changes</div>
|
||||
</body>
|
||||
<footer>
|
||||
<p>Footer changed</p>
|
||||
</footer>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
|
||||
def test_element_removal_output():
|
||||
from changedetectionio import fetch_site_status
|
||||
from inscriptis import get_text
|
||||
|
||||
# Check text with sub-parts renders correctly
|
||||
content = """<html>
|
||||
<header>
|
||||
<h2>Header</h2>
|
||||
</header>
|
||||
<nav>
|
||||
<ul>
|
||||
<li><a href="#">A</a></li>
|
||||
</ul>
|
||||
</nav>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<p>across multiple lines</p>
|
||||
<div id="changetext">Some text that changes</div>
|
||||
</body>
|
||||
<footer>
|
||||
<p>Footer</p>
|
||||
</footer>
|
||||
</html>
|
||||
"""
|
||||
html_blob = element_removal(
|
||||
["header", "footer", "nav", "#changetext"], html_content=content
|
||||
)
|
||||
text = get_text(html_blob)
|
||||
assert (
|
||||
text
|
||||
== """Some initial text
|
||||
|
||||
across multiple lines
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
def test_element_removal_full(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
set_original_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for("test_endpoint", _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"), data={"urls": test_url}, follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Goto the edit page, add the filter data
|
||||
# Not sure why \r needs to be added - absent of the #changetext this is not necessary
|
||||
subtractive_selectors_data = "header\r\nfooter\r\nnav\r\n#changetext"
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"subtractive_selectors": subtractive_selectors_data,
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests",
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
# Check it saved
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first"),
|
||||
)
|
||||
assert bytes(subtractive_selectors_data.encode("utf-8")) in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# No change yet - first check
|
||||
res = client.get(url_for("index"))
|
||||
assert b"unviewed" not in res.data
|
||||
|
||||
# Make a change to header/footer/nav
|
||||
set_modified_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# There should not be an unviewed change, as changes should be removed
|
||||
res = client.get(url_for("index"))
|
||||
assert b"unviewed" not in res.data
|
||||
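test_element_removal.py exercises element_removal() with the selectors header, footer, nav and #changetext. As an independent illustration of the idea (not the html_tools implementation), the same effect can be achieved with BeautifulSoup by selecting each CSS selector and decomposing the matches before text extraction:

from bs4 import BeautifulSoup

def remove_elements(selectors, html_content):
    # Drop every element matching any of the CSS selectors, return the remaining HTML.
    soup = BeautifulSoup(html_content, "html.parser")
    for selector in selectors:
        for element in soup.select(selector):
            element.decompose()
    return str(soup)

cleaned = remove_elements(
    ["header", "footer", "nav", "#changetext"],
    "<html><header>menu</header><body>keep me<div id='changetext'>drop me</div></body></html>")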
87
changedetectionio/tests/test_encoding.py
Normal file
@@ -0,0 +1,87 @@
|
||||
#!/usr/bin/python3
|
||||
# coding=utf-8
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup
|
||||
import pytest
|
||||
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
|
||||
def set_html_response():
|
||||
test_return_data = """
|
||||
<html><body><span class="nav_second_img_text">
|
||||
铸大国重器,挺制造脊梁,致力能源未来,赋能美好生活。
|
||||
</span>
|
||||
</body></html>
|
||||
"""
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
return None
|
||||
|
||||
|
||||
# In the case the server does not issue a charset= or doesnt have content_type header set
|
||||
def test_check_encoding_detection(client, live_server):
|
||||
set_html_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', content_type="text/html", _external=True)
|
||||
client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(2)
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Should see the proper string
|
||||
assert "铸大国重".encode('utf-8') in res.data
|
||||
# Should not see the failed encoding
|
||||
assert b'\xc2\xa7' not in res.data
|
||||
|
||||
|
||||
# In the case the server does not issue a charset= or doesnt have content_type header set
|
||||
def test_check_encoding_detection_missing_content_type_header(client, live_server):
|
||||
set_html_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(2)
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Should see the proper string
|
||||
assert "铸大国重".encode('utf-8') in res.data
|
||||
# Should not see the failed encoding
|
||||
assert b'\xc2\xa7' not in res.data
|
||||
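The two encoding tests above cover responses whose body is UTF-8 Chinese text but whose headers declare no charset, or no Content-Type at all. A common way to cope is to fall back to an encoding sniffed from the body when the declared one is missing or is only the HTTP default, for example via requests' apparent_encoding (a sketch of the general approach, not necessarily what the fetcher does internally):

import requests

def fetch_text(url):
    r = requests.get(url)
    # With no charset in the header, requests assumes ISO-8859-1 for text/* responses;
    # prefer the encoding detected from the body bytes in that case.
    if not r.encoding or r.encoding.lower() == "iso-8859-1":
        r.encoding = r.apparent_encoding
    return r.text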
65
changedetectionio/tests/test_errorhandling.py
Normal file
@@ -0,0 +1,65 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import time
|
||||
|
||||
from flask import url_for
|
||||
from . util import live_server_setup
|
||||
|
||||
from ..html_tools import *
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
|
||||
def test_error_handler(client, live_server):
|
||||
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint',
|
||||
status_code=403,
|
||||
_external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'Status Code 403' in res.data
|
||||
assert bytes("just now".encode('utf-8')) in res.data
|
||||
|
||||
# Just to be sure error text is properly handled
|
||||
def test_error_text_handler(client, live_server):
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": "https://errorfuldomainthatnevereallyexists12356.com"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'Name or service not known' in res.data
|
||||
assert bytes("just now".encode('utf-8')) in res.data
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
import json
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import set_original_response, set_modified_response, live_server_setup
|
||||
|
||||
# Hard to just add more live server URLs when one test is already running (I think)
|
||||
# So we add our test here (was in a different file)
|
||||
def test_headers_in_request(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_headers', _external=True)
|
||||
|
||||
# Add the test URL twice, we will check
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
cookie_header = '_ga=GA1.2.1022228332; cookie-preferences=analytics:accepted;'
|
||||
|
||||
|
||||
# Add some headers to a request
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"fetch_backend": "html_requests",
|
||||
"headers": "xxx:ooo\ncool:yeah\r\ncookie:"+cookie_header},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
|
||||
# Give the thread time to pick up the first version
|
||||
time.sleep(5)
|
||||
|
||||
# The service should echo back the request headers
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Flask will convert the header key to uppercase
|
||||
assert b"Xxx:ooo" in res.data
|
||||
assert b"Cool:yeah" in res.data
|
||||
|
||||
# The test call service will return the headers as the body
|
||||
from html import escape
|
||||
assert escape(cookie_header).encode('utf-8') in res.data
|
||||
|
||||
time.sleep(5)
|
||||
|
||||
# Re #137 - Examine the JSON index file, it should have only one set of headers entered
|
||||
watches_with_headers = 0
|
||||
with open('test-datastore/url-watches.json') as f:
|
||||
app_struct = json.load(f)
|
||||
for uuid in app_struct['watching']:
|
||||
if (len(app_struct['watching'][uuid]['headers'])):
|
||||
watches_with_headers += 1
|
||||
|
||||
# Should be only one with headers set
|
||||
assert watches_with_headers==1
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
38
changedetectionio/tests/test_html_to_text.py
Normal file
@@ -0,0 +1,38 @@
#!/usr/bin/python3
"""Test suite for the method to extract text from an html string"""
from ..html_tools import html_to_text


def test_html_to_text_func():
    test_html = """<html>
       <body>
     Some initial text</br>
     <p>Which is across multiple lines</p>
     <a href="/first_link"> More Text </a>
     </br>
     So let's see what happens. </br>
     <a href="second_link.com"> Even More Text </a>
     </body>
     </html>
    """

    # extract text, with 'render_anchor_tag_content' set to False
    text_content = html_to_text(test_html, render_anchor_tag_content=False)

    no_links_text = \
        "Some initial text\n\nWhich is across multiple " \
        "lines\n\nMore Text So let's see what happens. Even More Text"

    # check that no links are in the extracted text
    assert text_content == no_links_text

    # extract text, with 'render_anchor_tag_content' set to True
    text_content = html_to_text(test_html, render_anchor_tag_content=True)

    links_text = \
        "Some initial text\n\nWhich is across multiple lines\n\n[ More Text " \
        "](/first_link) So let's see what happens. [ Even More Text ]" \
        "(second_link.com)"

    # check that links are present in the extracted text
    assert text_content == links_text
@@ -3,6 +3,7 @@
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import live_server_setup
|
||||
from changedetectionio import html_tools
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
@@ -23,7 +24,7 @@ def test_strip_regex_text_func():
|
||||
ignore_lines = ["sometimes", "/\s\d{2,3}\s/", "/ignore-case text/"]
|
||||
|
||||
fetcher = fetch_site_status.perform_site_check(datastore=False)
|
||||
stripped_content = fetcher.strip_ignore_text(test_content, ignore_lines)
|
||||
stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines)
|
||||
|
||||
assert b"but 1 lines" in stripped_content
|
||||
assert b"igNORe-cAse text" not in stripped_content
|
||||
|
||||
@@ -3,6 +3,7 @@
import time
from flask import url_for
from . util import live_server_setup
from changedetectionio import html_tools

def test_setup(live_server):
    live_server_setup(live_server)

@@ -23,7 +24,7 @@ def test_strip_text_func():
    ignore_lines = ["sometimes"]

    fetcher = fetch_site_status.perform_site_check(datastore=False)
    stripped_content = fetcher.strip_ignore_text(test_content, ignore_lines)
    stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines)

    assert b"sometimes" not in stripped_content
    assert b"Some content" in stripped_content
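The two hunks above only move the call over to html_tools.strip_ignore_text; for context, here is
a minimal sketch (an assumption about the behaviour, not the project's implementation) of the rule
handling those assertions rely on: plain entries act as case-insensitive substring matches, entries
wrapped in /.../ are treated as regular expressions, and any line that matches a rule is dropped.

    import re

    def strip_ignore_text_sketch(content, ignore_lines):
        # Keep only the lines that match none of the ignore rules
        kept = []
        for line in content.splitlines():
            ignored = False
            for rule in ignore_lines:
                if len(rule) > 2 and rule.startswith('/') and rule.endswith('/'):
                    # "/.../" rules are regular expressions
                    if re.search(rule[1:-1], line, re.IGNORECASE):
                        ignored = True
                elif rule.lower() in line.lower():
                    ignored = True
            if not ignored:
                kept.append(line)
        return "\n".join(kept)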
@@ -52,6 +53,8 @@ def set_modified_original_ignore_response():
|
||||
<p>Which is across multiple lines</p>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
<p>new ignore stuff</p>
|
||||
<p>blah</p>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -67,7 +70,7 @@ def set_modified_ignore_response():
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<p>Which is across multiple lines</p>
|
||||
<P>ZZZZZ</P>
|
||||
<P>ZZZZz</P>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
</body>
|
||||
@@ -82,7 +85,8 @@ def set_modified_ignore_response():
|
||||
def test_check_ignore_text_functionality(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
ignore_text = "XXXXX\r\nYYYYY\r\nZZZZZ"
|
||||
# Use a mix of case in ZzZ to prove it works case-insensitive.
|
||||
ignore_text = "XXXXX\r\nYYYYY\r\nzZzZZ\r\nnew ignore stuff"
|
||||
set_original_ignore_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
@@ -98,7 +102,7 @@ def test_check_ignore_text_functionality(client, live_server):
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
@@ -119,7 +123,7 @@ def test_check_ignore_text_functionality(client, live_server):
|
||||
assert bytes(ignore_text.encode('utf-8')) in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
@@ -133,7 +137,7 @@ def test_check_ignore_text_functionality(client, live_server):
|
||||
set_modified_ignore_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
@@ -142,12 +146,115 @@ def test_check_ignore_text_functionality(client, live_server):
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Just to be sure.. set a regular modified change..
|
||||
set_modified_original_ignore_response()
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
# Check the preview/highlighter, we should be able to see what we ignored, but it should be highlighted
|
||||
# We only introduce the "modified" content that includes what we ignore so we can prove the newest version also displays
|
||||
# at /preview
|
||||
res = client.get(url_for("preview_page", uuid="first"))
|
||||
# We should be able to see what we ignored
|
||||
assert b'<div class="ignored">new ignore stuff' in res.data
|
||||
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
def test_check_global_ignore_text_functionality(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
ignore_text = "XXXXX\r\nYYYYY\r\nZZZZZ"
|
||||
set_original_ignore_response()
|
||||
|
||||
# Goto the settings page, add our ignore text
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-global_ignore_text": ignore_text,
|
||||
'application-fetch_backend': "html_requests"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
|
||||
# Goto the edit page of the item, add our ignore text
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"ignore_text": "something irrelevent but just to check", "url": test_url, 'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
# Check it saved
|
||||
res = client.get(
|
||||
url_for("settings_page"),
|
||||
)
|
||||
assert bytes(ignore_text.encode('utf-8')) in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# so that we are sure everything is viewed and in a known 'nothing changed' state
|
||||
res = client.get(url_for("diff_history_page", uuid="first"))
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
|
||||
# Make a change which includes the ignore text
|
||||
set_modified_ignore_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
# Just to be sure.. set a regular modified change that will trigger it
|
||||
set_modified_original_ignore_response()
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
res = client.get(url_for("api_delete", uuid="all"), follow_redirects=True)
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
125
changedetectionio/tests/test_ignorehyperlinks.py
Normal file
@@ -0,0 +1,125 @@
|
||||
#!/usr/bin/python3
|
||||
"""Test suite for the render/not render anchor tag content functionality"""
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from .util import live_server_setup
|
||||
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
def set_original_ignore_response():
|
||||
test_return_data = """<html>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<a href="/original_link"> Some More Text </a>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
|
||||
# Should be the same as set_original_ignore_response() but with a different
|
||||
# link
|
||||
def set_modified_ignore_response():
|
||||
test_return_data = """<html>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<a href="/modified_link"> Some More Text </a>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
def test_render_anchor_tag_content_true(client, live_server):
|
||||
"""Testing that the link changes are detected when
|
||||
render_anchor_tag_content setting is set to true"""
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# set original html text
|
||||
set_original_ignore_response()
|
||||
|
||||
# Goto the settings page, choose to ignore links (dont select/send "application-render_anchor_tag_content")
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-fetch_backend": "html_requests",
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for("test_endpoint", _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"), data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# set a new html text with a modified link
|
||||
set_modified_ignore_response()
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# We should not see the rendered anchor tag
|
||||
res = client.get(url_for("preview_page", uuid="first"))
|
||||
assert '(/modified_link)' not in res.data.decode()
|
||||
|
||||
# Goto the settings page, ENABLE render anchor tag
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-render_anchor_tag_content": "true",
|
||||
"application-fetch_backend": "html_requests",
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
|
||||
|
||||
# check that the anchor tag content is rendered
|
||||
res = client.get(url_for("preview_page", uuid="first"))
|
||||
assert '(/modified_link)' in res.data.decode()
|
||||
|
||||
# since the link has changed, and we chose to render anchor tag content,
|
||||
# we should detect a change (new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
assert b"unviewed" in res.data
|
||||
assert b"/test-endpoint" in res.data
|
||||
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("form_delete", uuid="all"),
|
||||
follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
190
changedetectionio/tests/test_ignorestatuscode.py
Normal file
@@ -0,0 +1,190 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import live_server_setup
|
||||
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
|
||||
def set_original_response():
|
||||
test_return_data = """<html>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<p>Which is across multiple lines</p>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
|
||||
def set_some_changed_response():
|
||||
test_return_data = """<html>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<p>Which is across multiple lines, and a new thing too.</p>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
|
||||
def test_normal_page_check_works_with_ignore_status_code(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
set_original_response()
|
||||
|
||||
# Goto the settings page, add our ignore text
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-ignore_status_codes": "y",
|
||||
'application-fetch_backend': "html_requests"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
set_some_changed_response()
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# It should report the change (a new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
|
||||
|
||||
# Tests the whole stack works with status codes ignored
|
||||
def test_403_page_check_works_with_ignore_status_code(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
set_original_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', status_code=403, _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# Goto the edit page, check our ignore option
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"ignore_status_codes": "y", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
# Make a change
|
||||
set_some_changed_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
# Because ignore_status_codes is set, the content change should still be detected on a 403 response
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
|
||||
# Tests the whole stack works with status codes ignored
|
||||
def test_403_page_check_fails_without_ignore_status_code(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
set_original_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', status_code=403, _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# Goto the edit page, check our ignore option
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
# Make a change
|
||||
set_some_changed_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# Without ignore_status_codes set, the 403 should be surfaced as an error on the watch overview
|
||||
res = client.get(url_for("index"))
|
||||
assert b'Status Code 403' in res.data
|
||||
96
changedetectionio/tests/test_ignorewhitespace.py
Normal file
@@ -0,0 +1,96 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import live_server_setup
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
|
||||
# Should be the same as set_original_ignore_response() but with a little more whitespacing
|
||||
def set_original_ignore_response_but_with_whitespace():
|
||||
test_return_data = """<html>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<p>
|
||||
|
||||
|
||||
Which is across multiple lines</p>
|
||||
<br>
|
||||
</br>
|
||||
|
||||
So let's see what happens. </br>
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
||||
"""
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
|
||||
def set_original_ignore_response():
|
||||
test_return_data = """<html>
|
||||
<body>
|
||||
Some initial text</br>
|
||||
<p>Which is across multiple lines</p>
|
||||
</br>
|
||||
So let's see what happens. </br>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
|
||||
|
||||
# If there was only a change in the whitespace, then we shouldn't have a change detected
|
||||
def test_check_ignore_whitespace(client, live_server):
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
set_original_ignore_response()
|
||||
|
||||
# Goto the settings page, add our ignore text
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"application-ignore_whitespace": "y",
|
||||
"application-fetch_backend": "html_requests"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
set_original_ignore_response_but_with_whitespace()
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' not in res.data
|
||||
assert b'/test-endpoint' in res.data
|
||||
120
changedetectionio/tests/test_import.py
Normal file
@@ -0,0 +1,120 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import time
|
||||
|
||||
from flask import url_for
|
||||
|
||||
from .util import live_server_setup
|
||||
def test_setup(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
def test_import(client, live_server):
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={
|
||||
"distill-io": "",
|
||||
"urls": """https://example.com
|
||||
https://example.com tag1
|
||||
https://example.com tag1, other tag"""
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
assert b"3 Imported" in res.data
|
||||
assert b"tag1" in res.data
|
||||
assert b"other tag" in res.data
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
|
||||
# Clear flask alerts
|
||||
res = client.get( url_for("index"))
|
||||
res = client.get( url_for("index"))
|
||||
|
||||
def xtest_import_skip_url(client, live_server):
|
||||
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={
|
||||
"distill-io": "",
|
||||
"urls": """https://example.com
|
||||
:ht000000broken
|
||||
"""
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
assert b"ht000000broken" in res.data
|
||||
assert b"1 Skipped" in res.data
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
# Clear flask alerts
|
||||
res = client.get( url_for("index"))
|
||||
|
||||
def test_import_distillio(client, live_server):
|
||||
|
||||
distill_data='''
|
||||
{
|
||||
"client": {
|
||||
"local": 1
|
||||
},
|
||||
"data": [
|
||||
{
|
||||
"name": "Unraid | News",
|
||||
"uri": "https://unraid.net/blog",
|
||||
"config": "{\\"selections\\":[{\\"frames\\":[{\\"index\\":0,\\"excludes\\":[],\\"includes\\":[{\\"type\\":\\"xpath\\",\\"expr\\":\\"(//div[@id='App']/div[contains(@class,'flex')]/main[contains(@class,'relative')]/section[contains(@class,'relative')]/div[@class='container']/div[contains(@class,'flex')]/div[contains(@class,'w-full')])[1]\\"}]}],\\"dynamic\\":true,\\"delay\\":2}],\\"ignoreEmptyText\\":true,\\"includeStyle\\":false,\\"dataAttr\\":\\"text\\"}",
|
||||
"tags": ["nice stuff", "nerd-news"],
|
||||
"content_type": 2,
|
||||
"state": 40,
|
||||
"schedule": "{\\"type\\":\\"INTERVAL\\",\\"params\\":{\\"interval\\":4447}}",
|
||||
"ts": "2022-03-27T15:51:15.667Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
'''
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={
|
||||
"distill-io": distill_data,
|
||||
"urls" : ''
|
||||
},
|
||||
follow_redirects=True,
|
||||
)
|
||||
|
||||
|
||||
assert b"Unable to read JSON file, was it broken?" not in res.data
|
||||
assert b"1 Imported from Distill.io" in res.data
|
||||
|
||||
res = client.get( url_for("edit_page", uuid="first"))
|
||||
|
||||
assert b"https://unraid.net/blog" in res.data
|
||||
assert b"Unraid | News" in res.data
|
||||
|
||||
|
||||
# flask/wtforms should recode this, check we see it
|
||||
# wtforms encodes it like id=' ,but html.escape makes it like id='
|
||||
# - so just check it manually :(
|
||||
#import json
|
||||
#import html
|
||||
#d = json.loads(distill_data)
|
||||
# embedded_d=json.loads(d['data'][0]['config'])
|
||||
# x=html.escape(embedded_d['selections'][0]['frames'][0]['includes'][0]['expr']).encode('utf-8')
|
||||
assert b"xpath:(//div[@id='App']/div[contains(@class,'flex')]/main[contains(@class,'relative')]/section[contains(@class,'relative')]/div[@class='container']/div[contains(@class,'flex')]/div[contains(@class,'w-full')])[1]" in res.data
|
||||
|
||||
# did the tags work?
|
||||
res = client.get( url_for("index"))
|
||||
|
||||
assert b"nice stuff" in res.data
|
||||
assert b"nerd-news" in res.data
|
||||
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
# Clear flask alerts
|
||||
res = client.get(url_for("index"))
|
||||
@@ -1,10 +1,15 @@
|
||||
#!/usr/bin/python3
|
||||
# coding=utf-8
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import live_server_setup
|
||||
import pytest
|
||||
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
def test_unittest_inline_html_extract():
|
||||
# So lets pretend that the JSON we want is inside some HTML
|
||||
content="""
|
||||
@@ -42,6 +47,45 @@ and it can also be repeated
|
||||
with pytest.raises(html_tools.JSONNotFound) as e_info:
|
||||
html_tools.extract_json_as_string('COMPLETE GIBBERISH, NO JSON!', "$.id")
|
||||
|
||||
def set_original_ext_response():
|
||||
data = """
|
||||
[
|
||||
{
|
||||
"isPriceLowered": false,
|
||||
"status": "ForSale",
|
||||
"statusOrig": "for sale"
|
||||
},
|
||||
{
|
||||
"_id": "5e7b3e1fb3262d306323ff1e",
|
||||
"listingsType": "consumer",
|
||||
"status": "ForSale",
|
||||
"statusOrig": "for sale"
|
||||
}
|
||||
]
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(data)
|
||||
|
||||
def set_modified_ext_response():
|
||||
data = """
|
||||
[
|
||||
{
|
||||
"isPriceLowered": false,
|
||||
"status": "Sold",
|
||||
"statusOrig": "sold"
|
||||
},
|
||||
{
|
||||
"_id": "5e7b3e1fb3262d306323ff1e",
|
||||
"listingsType": "consumer",
|
||||
"isPriceLowered": false,
|
||||
"status": "Sold"
|
||||
}
|
||||
]
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(data)
|
||||
|
||||
def set_original_response():
|
||||
test_return_data = """
|
||||
@@ -60,7 +104,23 @@ def set_original_response():
|
||||
],
|
||||
"boss": {
|
||||
"name": "Fat guy"
|
||||
}
|
||||
},
|
||||
"available": true
|
||||
}
|
||||
"""
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
return None
|
||||
|
||||
|
||||
def set_response_with_html():
|
||||
test_return_data = """
|
||||
{
|
||||
"test": [
|
||||
{
|
||||
"html": "<b>"
|
||||
}
|
||||
]
|
||||
}
|
||||
"""
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
@@ -83,8 +143,9 @@ def set_modified_response():
|
||||
}
|
||||
],
|
||||
"boss": {
|
||||
"name": "Foobar"
|
||||
}
|
||||
"name": "Örnsköldsvik"
|
||||
},
|
||||
"available": false
|
||||
}
|
||||
"""
|
||||
|
||||
@@ -93,11 +154,38 @@ def set_modified_response():
|
||||
|
||||
return None
|
||||
|
||||
def test_check_json_without_filter(client, live_server):
|
||||
# Request a JSON document from an application/json source containing HTML
# and be sure it doesn't get chewed up by inscriptis
|
||||
set_response_with_html()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', content_type="application/json", _external=True)
|
||||
client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b'"<b>' in res.data
|
||||
assert res.data.count(b'{\n') >= 2
|
||||
|
||||
|
||||
def test_check_json_filter(client, live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
json_filter = 'json:boss.name'
|
||||
|
||||
set_original_response()
|
||||
@@ -106,7 +194,7 @@ def test_check_json_filter(client, live_server):
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
test_url = url_for('test_endpoint', content_type="application/json", _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
@@ -115,7 +203,7 @@ def test_check_json_filter(client, live_server):
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
@@ -141,7 +229,7 @@ def test_check_json_filter(client, live_server):
|
||||
assert bytes(json_filter.encode('utf-8')) in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
@@ -149,7 +237,7 @@ def test_check_json_filter(client, live_server):
|
||||
set_modified_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(4)
|
||||
|
||||
@@ -159,5 +247,132 @@ def test_check_json_filter(client, live_server):
|
||||
|
||||
# Should not see this, because its not in the JSONPath we entered
|
||||
res = client.get(url_for("diff_history_page", uuid="first"))
|
||||
|
||||
# But the change should be there, tho its hard to test the change was detected because it will show old and new versions
|
||||
assert b'Foobar' in res.data
|
||||
# And #462 - check we see the proper utf-8 string there
|
||||
assert "Örnsköldsvik".encode('utf-8') in res.data
|
||||
|
||||
|
||||
def test_check_json_filter_bool_val(client, live_server):
|
||||
json_filter = "json:$['available']"
|
||||
|
||||
set_original_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
test_url = url_for('test_endpoint', content_type="application/json", _external=True)
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(3)
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"css_filter": json_filter,
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
time.sleep(3)
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
# Make a change
|
||||
set_modified_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
|
||||
res = client.get(url_for("diff_history_page", uuid="first"))
|
||||
# But the change should be there, tho its hard to test the change was detected because it will show old and new versions
|
||||
assert b'false' in res.data
|
||||
|
||||
# Re #265 - Extended JSON selector test
|
||||
# Stuff to consider here
|
||||
# - Selector should be allowed to return empty when it doesnt match (people might wait for some condition)
|
||||
# - The 'diff' tab could show the old and new content
|
||||
# - Form should let us enter a selector that doesnt (yet) match anything
|
||||
def test_check_json_ext_filter(client, live_server):
|
||||
json_filter = 'json:$[?(@.status==Sold)]'
|
||||
|
||||
set_original_ext_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', content_type="application/json", _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"css_filter": json_filter,
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
# Check it saved
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first"),
|
||||
)
|
||||
assert bytes(json_filter.encode('utf-8')) in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
# Make a change
|
||||
set_modified_ext_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(4)
|
||||
|
||||
# It should have 'unviewed'
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
res = client.get(url_for("diff_history_page", uuid="first"))
|
||||
|
||||
# We should never see 'ForSale' because we are selecting on 'Sold' in the rule,
|
||||
# But we should know it triggered ('unviewed' assert above)
|
||||
assert b'ForSale' not in res.data
|
||||
assert b'Sold' in res.data
|
||||
|
||||
|
||||
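A minimal sketch of what the extended selector above is meant to select from the fixture data
(plain Python, not the project's JSONPath implementation), which is why the resulting diff can
contain "Sold" but never "ForSale":

    import json

    # The fixture written by set_modified_ext_response() above
    with open("test-datastore/endpoint-content.txt") as f:
        data = json.load(f)

    # Hand-rolled equivalent of json:$[?(@.status==Sold)]
    sold_items = [item for item in data if item.get("status") == "Sold"]
    print(json.dumps(sold_items, indent=4))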
102
changedetectionio/tests/test_nonrenderable_pages.py
Normal file
@@ -0,0 +1,102 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from urllib.request import urlopen
|
||||
from .util import set_original_response, set_modified_response, live_server_setup
|
||||
|
||||
sleep_time_for_fetch_thread = 3
|
||||
|
||||
|
||||
def set_nonrenderable_response():
|
||||
test_return_data = """<html>
|
||||
<head><title>modified head title</title></head>
|
||||
<!-- like when some angular app was broken and doesnt render or whatever -->
|
||||
<body>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
return None
|
||||
|
||||
def test_check_basic_change_detection_functionality(client, live_server):
|
||||
set_original_response()
|
||||
live_server_setup(live_server)
|
||||
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": url_for('test_endpoint', _external=True)},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# Do this a few times.. ensures we don't accidentally set the status
|
||||
for n in range(3):
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' not in res.data
|
||||
|
||||
|
||||
#####################
|
||||
client.post(
|
||||
url_for("settings_page"),
|
||||
data={"application-empty_pages_are_a_change": "",
|
||||
"requests-time_between_check-minutes": 180,
|
||||
'application-fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# this should not trigger a change, because no good text could be converted from the HTML
|
||||
set_nonrenderable_response()
|
||||
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# It should report nothing found (no new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' not in res.data
|
||||
|
||||
|
||||
# ok now do the opposite
|
||||
|
||||
client.post(
|
||||
url_for("settings_page"),
|
||||
data={"application-empty_pages_are_a_change": "y",
|
||||
"requests-time_between_check-minutes": 180,
|
||||
'application-fetch_backend': "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
set_modified_response()
|
||||
|
||||
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(sleep_time_for_fetch_thread)
|
||||
|
||||
# It should now report a change (new 'unviewed' class)
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
|
||||
|
||||
|
||||
#
|
||||
# Cleanup everything
|
||||
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
|
||||
assert b'Deleted' in res.data
|
||||
|
||||
@@ -1,26 +1,46 @@
|
||||
import os
|
||||
import time
|
||||
import re
|
||||
from flask import url_for
|
||||
from . util import set_original_response, set_modified_response, live_server_setup
|
||||
from . util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup
|
||||
import logging
|
||||
from changedetectionio.notification import default_notification_body, default_notification_title
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
# Hard to just add more live server URLs when one test is already running (I think)
|
||||
# So we add our test here (was in a different file)
|
||||
def test_check_notification(client, live_server):
|
||||
|
||||
live_server_setup(live_server)
|
||||
set_original_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(3)
|
||||
|
||||
# Re 360 - new install should have defaults set
|
||||
res = client.get(url_for("settings_page"))
|
||||
assert default_notification_body.encode() in res.data
|
||||
assert default_notification_title.encode() in res.data
|
||||
|
||||
# When test mode is in BASE_URL env mode, we should see this already configured
|
||||
env_base_url = os.getenv('BASE_URL', '').strip()
|
||||
if len(env_base_url):
|
||||
logging.debug(">>> BASE_URL enabled, looking for %s", env_base_url)
|
||||
res = client.get(url_for("settings_page"))
|
||||
assert bytes(env_base_url.encode('utf-8')) in res.data
|
||||
else:
|
||||
logging.debug(">>> SKIPPING BASE_URL check")
|
||||
|
||||
# re #242 - when you edited an existing new entry, it would not correctly show the notification settings
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
url_for("form_watch_add"),
|
||||
data={"url": test_url, "tag": ''},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
# Give the thread time to pick up the first version
|
||||
time.sleep(3)
|
||||
@@ -31,104 +51,163 @@ def test_check_notification(client, live_server):
|
||||
notification_url = url.replace('http', 'json')
|
||||
|
||||
print (">>>> Notification URL: "+notification_url)
|
||||
|
||||
notification_form_data = {"notification_urls": notification_url,
|
||||
"notification_title": "New ChangeDetection.io Notification - {watch_url}",
|
||||
"notification_body": "BASE URL: {base_url}\n"
|
||||
"Watch URL: {watch_url}\n"
|
||||
"Watch UUID: {watch_uuid}\n"
|
||||
"Watch title: {watch_title}\n"
|
||||
"Watch tag: {watch_tag}\n"
|
||||
"Preview: {preview_url}\n"
|
||||
"Diff URL: {diff_url}\n"
|
||||
"Snapshot: {current_snapshot}\n"
|
||||
"Diff: {diff}\n"
|
||||
"Diff Full: {diff_full}\n"
|
||||
":-)",
|
||||
"notification_format": "Text"}
|
||||
|
||||
notification_form_data.update({
|
||||
"url": test_url,
|
||||
"tag": "my tag",
|
||||
"title": "my title",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests"})
|
||||
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"notification_urls": notification_url,
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests",
|
||||
"trigger_check": "y"},
|
||||
data=notification_form_data,
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
assert b"Notifications queued" in res.data
|
||||
|
||||
|
||||
# Hit the edit page, be sure that we saved it
|
||||
# Re #242 - wasnt saving?
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first"))
|
||||
assert bytes(notification_url.encode('utf-8')) in res.data
|
||||
assert bytes("New ChangeDetection.io Notification".encode('utf-8')) in res.data
|
||||
|
||||
|
||||
# Because we hit 'send test notification on save'
|
||||
|
||||
## Now recheck, and it should have sent the notification
|
||||
time.sleep(3)
|
||||
set_modified_response()
|
||||
|
||||
notification_submission = None
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(3)
|
||||
# Verify what was sent as a notification, this file should exist
|
||||
with open("test-datastore/notification.txt", "r") as f:
|
||||
notification_submission = f.read()
|
||||
# Did we see the URL that had a change, in the notification?
|
||||
|
||||
assert test_url in notification_submission
|
||||
|
||||
os.unlink("test-datastore/notification.txt")
|
||||
|
||||
# Did we see the URL that had a change, in the notification?
|
||||
# Diff was correctly executed
|
||||
assert test_url in notification_submission
|
||||
assert ':-)' in notification_submission
|
||||
assert "Diff Full: Some initial text" in notification_submission
|
||||
assert "Diff: (changed) Which is across multiple lines" in notification_submission
|
||||
assert "(into ) which has this one new line" in notification_submission
|
||||
# Re #342 - check for accidental python byte encoding of non-utf8/string
|
||||
assert "b'" not in notification_submission
|
||||
assert re.search('Watch UUID: [0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}', notification_submission, re.IGNORECASE)
|
||||
assert "Watch title: my title" in notification_submission
|
||||
assert "Watch tag: my tag" in notification_submission
|
||||
assert "diff/" in notification_submission
|
||||
assert "preview/" in notification_submission
|
||||
assert ":-)" in notification_submission
|
||||
assert "New ChangeDetection.io Notification - {}".format(test_url) in notification_submission
|
||||
|
||||
set_modified_response()
|
||||
if env_base_url:
|
||||
# Re #65 - did we see our BASE_URl ?
|
||||
logging.debug (">>> BASE_URL checking in notification: %s", env_base_url)
|
||||
assert env_base_url in notification_submission
|
||||
else:
|
||||
logging.debug(">>> Skipping BASE_URL check")
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
|
||||
# This should insert the {current_snapshot}
|
||||
set_more_modified_response()
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(3)
|
||||
|
||||
# Did the front end see it?
|
||||
res = client.get(
|
||||
url_for("index"))
|
||||
|
||||
assert bytes("just now".encode('utf-8')) in res.data
|
||||
|
||||
notification_submission=None
|
||||
# Verify what was sent as a notification
|
||||
# Verify what was sent as a notification, this file should exist
|
||||
with open("test-datastore/notification.txt", "r") as f:
|
||||
notification_submission = f.read()
|
||||
# Did we see the URL that had a change, in the notification?
|
||||
|
||||
assert test_url in notification_submission
|
||||
|
||||
# Re #65 - did we see our foobar.com BASE_URL ?
|
||||
#assert bytes("https://foobar.com".encode('utf-8')) in notification_submission
|
||||
assert "Ohh yeah awesome" in notification_submission
|
||||
|
||||
|
||||
## Now configure something clever, we go into custom config (non-default) mode, this is returned by the endpoint
|
||||
with open("test-datastore/endpoint-content.txt", "w") as f:
|
||||
f.write(";jasdhflkjadshf kjhsdfkjl ahslkjf haslkjd hfaklsj hf\njl;asdhfkasj stuff we will detect\n")
|
||||
# Prove that "content constantly being marked as Changed with no Updating causes notification" is not a thing
|
||||
# https://github.com/dgtlmoon/changedetection.io/discussions/192
|
||||
os.unlink("test-datastore/notification.txt")
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(1)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(1)
|
||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||
time.sleep(1)
|
||||
assert os.path.exists("test-datastore/notification.txt") == False
|
||||
|
||||
# cleanup for the next
|
||||
client.get(
|
||||
url_for("form_delete", uuid="all"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
|
||||
def test_notification_validation(client, live_server):
|
||||
#live_server_setup(live_server)
|
||||
time.sleep(3)
|
||||
# re #242 - when you edited an existing new entry, it would not correctly show the notification settings
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("form_watch_add"),
|
||||
data={"url": test_url, "tag": 'nice one'},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Watch added" in res.data
|
||||
|
||||
# Re #360 some validation
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"notification_urls": 'json://localhost/foobar',
|
||||
"notification_title": "",
|
||||
"notification_body": "",
|
||||
"notification_format": "Text",
|
||||
"url": test_url,
|
||||
"tag": "my tag",
|
||||
"title": "my title",
|
||||
"headers": "",
|
||||
"fetch_backend": "html_requests"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Notification Body and Title is required when a Notification URL is used" in res.data
|
||||
|
||||
# Now adding a wrong token should give us an error
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={"notification_title": "New ChangeDetection.io Notification - {watch_url}",
|
||||
"notification_body": "{base_url}\n{watch_url}\n{preview_url}\n{diff_url}\n{current_snapshot}\n:-)",
|
||||
"notification_urls": "json://foobar.com", #Re #143 should not see that it sent without [test checkbox]
|
||||
"minutes_between_check": 180,
|
||||
"fetch_backend": "html_requests",
|
||||
data={"application-notification_title": "New ChangeDetection.io Notification - {watch_url}",
|
||||
"application-notification_body": "Rubbish: {rubbish}\n",
|
||||
"application-notification_format": "Text",
|
||||
"application-notification_urls": "json://localhost/foobar",
|
||||
"requests-time_between_check-minutes": 180,
|
||||
"fetch_backend": "html_requests"
|
||||
},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
# Re #143 - should not see this if we didnt hit the test box
|
||||
assert b"Notifications queued" not in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
assert bytes("is not a valid token".encode('utf-8')) in res.data
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
|
||||
# Did the front end see it?
|
||||
res = client.get(
|
||||
url_for("index"))
|
||||
|
||||
assert bytes("just now".encode('utf-8')) in res.data
|
||||
|
||||
with open("test-datastore/notification.txt", "r") as f:
|
||||
notification_submission = f.read()
|
||||
|
||||
# @todo regex that diff/uuid-31123-123-etc
|
||||
|
||||
assert "diff/" in notification_submission
|
||||
assert "preview/" in notification_submission
|
||||
assert ":-)" in notification_submission
|
||||
assert "New ChangeDetection.io Notification - {}".format(test_url) in notification_submission
|
||||
# This should insert the {current_snapshot}
|
||||
assert "stuff we will detect" in notification_submission
|
||||
# cleanup for the next
|
||||
client.get(
|
||||
url_for("form_delete", uuid="all"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
66
changedetectionio/tests/test_notification_errors.py
Normal file
@@ -0,0 +1,66 @@
import os
import time
import re
from flask import url_for
from . util import set_original_response, set_modified_response, live_server_setup
import logging

def test_check_notification_error_handling(client, live_server):

    live_server_setup(live_server)
    set_original_response()

    # Give the endpoint time to spin up
    time.sleep(3)

    # use a different URL so that it doesnt interfere with the actual check until we are ready
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("form_watch_add"),
        data={"url": "https://changedetection.io/CHANGELOG.txt", "tag": ''},
        follow_redirects=True
    )
    assert b"Watch added" in res.data

    time.sleep(10)

    # Check we capture the failure, we can just use trigger_check = y here
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"notification_urls": "jsons://broken-url.changedetection.io/test",
              "notification_title": "xxx",
              "notification_body": "xxxxx",
              "notification_format": "Text",
              "url": test_url,
              "tag": "",
              "title": "",
              "headers": "",
              "time_between_check-minutes": "180",
              "fetch_backend": "html_requests",
              "trigger_check": "y"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

    found=False
    for i in range(1, 10):
        time.sleep(1)
        logging.debug("Fetching watch overview....")
        res = client.get(
            url_for("index"))

        if bytes("Notification error detected".encode('utf-8')) in res.data:
            found=True
            break

    assert found

    # The error should show in the notification logs
    res = client.get(
        url_for("notification_logs"))
    assert bytes("Name or service not known".encode('utf-8')) in res.data

    # And it should be listed on the watch overview
231
changedetectionio/tests/test_request.py
Normal file
@@ -0,0 +1,231 @@
|
||||
import json
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import set_original_response, set_modified_response, live_server_setup
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
# Hard to just add more live server URLs when one test is already running (I think)
|
||||
# So we add our test here (was in a different file)
|
||||
def test_headers_in_request(client, live_server):
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_headers', _external=True)
|
||||
|
||||
# Add the test URL twice, we will check
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
time.sleep(3)
|
||||
cookie_header = '_ga=GA1.2.1022228332; cookie-preferences=analytics:accepted;'
|
||||
|
||||
|
||||
# Add some headers to a request
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"fetch_backend": "html_requests",
|
||||
"headers": "xxx:ooo\ncool:yeah\r\ncookie:"+cookie_header},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
|
||||
# Give the thread time to pick up the first version
|
||||
time.sleep(5)
|
||||
|
||||
# The service should echo back the request headers
|
||||
res = client.get(
|
||||
url_for("preview_page", uuid="first"),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
# Flask will convert the header key to uppercase
|
||||
assert b"Xxx:ooo" in res.data
|
||||
assert b"Cool:yeah" in res.data
|
||||
|
||||
    # The test call service will return the headers as the body
    from html import escape
    assert escape(cookie_header).encode('utf-8') in res.data

    time.sleep(5)

    # Re #137 - Examine the JSON index file, it should have only one set of headers entered
    watches_with_headers = 0
    with open('test-datastore/url-watches.json') as f:
        app_struct = json.load(f)
        for uuid in app_struct['watching']:
            if (len(app_struct['watching'][uuid]['headers'])):
                watches_with_headers += 1

    # Should be only one with headers set
    assert watches_with_headers == 1


def test_body_in_request(client, live_server):
    # Add our URL to the import page
    test_url = url_for('test_body', _external=True)

    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    time.sleep(3)

    # Add the first 'version'
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={
            "url": test_url,
            "tag": "",
            "method": "POST",
            "fetch_backend": "html_requests",
            "body": "something something"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

    time.sleep(3)

    # Now change the body, which should trigger a change detection
    body_value = 'Test Body Value'
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={
            "url": test_url,
            "tag": "",
            "method": "POST",
            "fetch_backend": "html_requests",
            "body": body_value},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

    time.sleep(3)

    # The service should echo back the body
    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    # If this gets stuck something is wrong, something should always be there
    assert b"No history found" not in res.data
    # We should see what we sent in the reply
    assert str.encode(body_value) in res.data

    ####### data sanity checks
    # Add the test URL a second time, then verify only one watch carries the body
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    watches_with_body = 0
    with open('test-datastore/url-watches.json') as f:
        app_struct = json.load(f)
        for uuid in app_struct['watching']:
            if app_struct['watching'][uuid]['body'] == body_value:
                watches_with_body += 1

    # Should be only one with body set
    assert watches_with_body == 1

    # Attempt to add a body with a GET method
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={
            "url": test_url,
            "tag": "",
            "method": "GET",
            "fetch_backend": "html_requests",
            "body": "invalid"},
        follow_redirects=True
    )
    assert b"Body must be empty when Request Method is set to GET" in res.data


def test_method_in_request(client, live_server):
    # Add our URL to the import page
    test_url = url_for('test_method', _external=True)

    # Add the test URL twice, then verify only one watch carries the method
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    # Attempt to add a method which is not valid
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={
            "url": test_url,
            "tag": "",
            "fetch_backend": "html_requests",
            "method": "invalid"},
        follow_redirects=True
    )
    assert b"Not a valid choice" in res.data

    # Now set a valid method
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={
            "url": test_url,
            "tag": "",
            "fetch_backend": "html_requests",
            "method": "PATCH"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

    # Give the thread time to pick up the first version
    time.sleep(5)

    # The service should echo back the request verb
    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    # The test call service will return the verb as the body
    assert b"PATCH" in res.data

    time.sleep(5)

    watches_with_method = 0
    with open('test-datastore/url-watches.json') as f:
        app_struct = json.load(f)
        for uuid in app_struct['watching']:
            if app_struct['watching'][uuid]['method'] == 'PATCH':
                watches_with_method += 1

    # Should be only one with method set to PATCH
    assert watches_with_method == 1
changedetectionio/tests/test_security.py (new file, 36 lines)
@@ -0,0 +1,36 @@
from flask import url_for
from .util import set_original_response, set_modified_response, live_server_setup
import time


def test_setup(live_server):
    live_server_setup(live_server)


def test_file_access(client, live_server):

    res = client.post(
        url_for("import_page"),
        data={"urls": 'https://localhost'},
        follow_redirects=True
    )

    assert b"1 Imported" in res.data

    # Attempt to point the watch at a local file:// URL, which should be refused
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={
            "url": 'file:///etc/passwd',
            "tag": "",
            "method": "GET",
            "fetch_backend": "html_requests",
            "body": ""},
        follow_redirects=True
    )
    time.sleep(3)

    res = client.get(
        url_for("index", uuid="first"),
        follow_redirects=True
    )

    assert b'denied for security reasons' in res.data
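The assertion above relies on changedetection.io refusing to fetch file:// URLs. As a rough, generic sketch of the kind of scheme check this behaviour implies (not the project's actual implementation; is_safe_watch_url is a hypothetical helper name used only for illustration):

from urllib.parse import urlparse

def is_safe_watch_url(url: str) -> bool:
    # Allow only plain web schemes; file://, ftp:// and friends are refused
    return urlparse(url).scheme in ('http', 'https')

assert is_safe_watch_url('https://localhost')
assert not is_safe_watch_url('file:///etc/passwd')  # the URL this test submits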
changedetectionio/tests/test_share_watch.py (new file, 76 lines)
@@ -0,0 +1,76 @@
#!/usr/bin/python3

import time
from flask import url_for
from urllib.request import urlopen
from .util import set_original_response, set_modified_response, live_server_setup
import re

sleep_time_for_fetch_thread = 3


def test_share_watch(client, live_server):
    set_original_response()
    live_server_setup(live_server)

    test_url = url_for('test_endpoint', _external=True)
    css_filter = ".nice-filter"

    # Add our URL to the import page
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )

    assert b"1 Imported" in res.data

    # Goto the edit page and set our CSS filter
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"css_filter": css_filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    # Check it saved
    res = client.get(
        url_for("edit_page", uuid="first"),
    )
    assert bytes(css_filter.encode('utf-8')) in res.data

    # Click the 'share' link
    res = client.get(
        url_for("form_share_put_watch", uuid="first"),
        follow_redirects=True
    )

    assert b"Share this link:" in res.data
    assert b"https://changedetection.io/share/" in res.data

    html = res.data.decode()
    share_link_search = re.search('<span id="share-link">(.*)</span>', html, re.IGNORECASE)
    assert share_link_search

    # Now delete what we have, we will try to re-import it
    # Cleanup everything
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

    # Add the shared link to the import page
    res = client.post(
        url_for("import_page"),
        data={"urls": share_link_search.group(1)},
        follow_redirects=True
    )

    assert b"1 Imported" in res.data

    # Now hit edit, we should see that the import fetched the watch meta-data

    # Check it saved
    res = client.get(
        url_for("edit_page", uuid="first"),
    )
    assert bytes(css_filter.encode('utf-8')) in res.data
changedetectionio/tests/test_source.py (new file, 95 lines)
@@ -0,0 +1,95 @@
#!/usr/bin/python3

import time
from flask import url_for
from urllib.request import urlopen
from .util import set_original_response, set_modified_response, live_server_setup

sleep_time_for_fetch_thread = 3


def test_setup(live_server):
    live_server_setup(live_server)


def test_check_basic_change_detection_functionality_source(client, live_server):
    set_original_response()
    test_url = 'source:' + url_for('test_endpoint', _external=True)
    # Add our URL to the import page
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )

    assert b"1 Imported" in res.data

    time.sleep(sleep_time_for_fetch_thread)

    #####################

    # Check that the 'source:' prefix was detected and worked
    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    # This class name should appear, because we are viewing the raw HTML source rather than the rendered text
    assert b'foobar-detection' in res.data

    # Make a change
    set_modified_response()

    # Force recheck
    res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
    assert b'1 watches are queued for rechecking.' in res.data

    time.sleep(5)

    # Now something should be ready, indicated by having a 'unviewed' class
    res = client.get(url_for("index"))
    assert b'unviewed' in res.data

    res = client.get(
        url_for("diff_history_page", uuid="first"),
        follow_redirects=True
    )

    assert b'<title>modified head title' in res.data


def test_check_ignore_elements(client, live_server):
    set_original_response()

    time.sleep(2)
    test_url = 'source:' + url_for('test_endpoint', _external=True)
    # Add our URL to the import page
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )

    assert b"1 Imported" in res.data

    time.sleep(sleep_time_for_fetch_thread)

    #####################
    # We want <span> and <p> ONLY, but ignore the span with .foobar-detection

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"css_filter": 'span,p', "url": test_url, "tag": "", "subtractive_selectors": ".foobar-detection", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    time.sleep(sleep_time_for_fetch_thread)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b'foobar-detection' not in res.data
    assert b'<br' not in res.data
    assert b'<p' in res.data
@@ -76,7 +76,7 @@ def test_trigger_functionality(client, live_server):
     assert b"1 Imported" in res.data

     # Trigger a check
-    client.get(url_for("api_watch_checknow"), follow_redirects=True)
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)

     # Give the thread time to pick it up
     time.sleep(sleep_time_for_fetch_thread)
@@ -99,7 +99,7 @@ def test_trigger_functionality(client, live_server):
     assert bytes(trigger_text.encode('utf-8')) in res.data

     # Trigger a check
-    client.get(url_for("api_watch_checknow"), follow_redirects=True)
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)

     # Give the thread time to pick it up
     time.sleep(sleep_time_for_fetch_thread)
@@ -113,7 +113,7 @@ def test_trigger_functionality(client, live_server):
     set_modified_original_ignore_response()

     # Trigger a check
-    client.get(url_for("api_watch_checknow"), follow_redirects=True)
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
     # Give the thread time to pick it up
     time.sleep(sleep_time_for_fetch_thread)

@@ -125,7 +125,12 @@ def test_trigger_functionality(client, live_server):
     time.sleep(sleep_time_for_fetch_thread)
     set_modified_with_trigger_text_response()

-    client.get(url_for("api_watch_checknow"), follow_redirects=True)
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
     time.sleep(sleep_time_for_fetch_thread)
     res = client.get(url_for("index"))
     assert b'unviewed' in res.data
+
+    # Check the preview/highlighter, we should be able to see what we triggered on, but it should be highlighted
+    res = client.get(url_for("preview_page", uuid="first"))
+    # We should be able to see what we ignored
+    assert b'<div class="triggered">foobar' in res.data
@@ -43,7 +43,7 @@ def test_trigger_regex_functionality(client, live_server):
     assert b"1 Imported" in res.data

     # Trigger a check
-    client.get(url_for("api_watch_checknow"), follow_redirects=True)
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)

     # Give the thread time to pick it up
     time.sleep(sleep_time_for_fetch_thread)
@@ -65,7 +65,7 @@ def test_trigger_regex_functionality(client, live_server):
     with open("test-datastore/endpoint-content.txt", "w") as f:
         f.write("some new noise")

-    client.get(url_for("api_watch_checknow"), follow_redirects=True)
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
     time.sleep(sleep_time_for_fetch_thread)

     # It should report nothing found (nothing should match the regex)
@@ -75,7 +75,7 @@ def test_trigger_regex_functionality(client, live_server):
     with open("test-datastore/endpoint-content.txt", "w") as f:
         f.write("regex test123<br/>\nsomething 123")

-    client.get(url_for("api_watch_checknow"), follow_redirects=True)
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
     time.sleep(sleep_time_for_fetch_thread)
     res = client.get(url_for("index"))
     assert b'unviewed' in res.data
@@ -43,7 +43,7 @@ def test_trigger_regex_functionality(client, live_server):
     assert b"1 Imported" in res.data

     # Trigger a check
-    client.get(url_for("api_watch_checknow"), follow_redirects=True)
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)

     # Give the thread time to pick it up
     time.sleep(sleep_time_for_fetch_thread)
@@ -66,7 +66,7 @@ def test_trigger_regex_functionality(client, live_server):
     with open("test-datastore/endpoint-content.txt", "w") as f:
         f.write("<html>some new noise with cool stuff2 ok</html>")

-    client.get(url_for("api_watch_checknow"), follow_redirects=True)
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
     time.sleep(sleep_time_for_fetch_thread)

     # It should report nothing found (nothing should match the regex and filter)
@@ -76,7 +76,7 @@ def test_trigger_regex_functionality(client, live_server):
     with open("test-datastore/endpoint-content.txt", "w") as f:
         f.write("<html>some new noise with <span id=in-here>cool stuff6</span> ok</html>")

-    client.get(url_for("api_watch_checknow"), follow_redirects=True)
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
     time.sleep(sleep_time_for_fetch_thread)
     res = client.get(url_for("index"))
     assert b'unviewed' in res.data
@@ -20,8 +20,8 @@ def test_check_watch_field_storage(client, live_server):

     res = client.post(
         url_for("edit_page", uuid="first"),
-        data={ "notification_urls": "http://myapi.com",
-               "minutes_between_check": 126,
+        data={ "notification_urls": "json://127.0.0.1:30000\r\njson://128.0.0.1\r\n",
+               "time_between_check-minutes": 126,
                "css_filter" : ".fooclass",
                "title" : "My title",
                "ignore_text" : "ignore this",
@@ -38,8 +38,14 @@ def test_check_watch_field_storage(client, live_server):
         url_for("edit_page", uuid="first"),
         follow_redirects=True
     )
+    # checks that we dont get an error when using blank lines in the field value
+    assert not b"json://127.0.0.1\n\njson" in res.data
+    assert not b"json://127.0.0.1\r\n\njson" in res.data
+    assert not b"json://127.0.0.1\r\n\rjson" in res.data
+
+    assert b"json://127.0.0.1" in res.data
+    assert b"json://128.0.0.1" in res.data

-    assert b"http://myapi.com" in res.data
     assert b"126" in res.data
     assert b".fooclass" in res.data
     assert b"My title" in res.data
@@ -56,8 +62,8 @@ def test_check_recheck_global_setting(client, live_server):
     res = client.post(
         url_for("settings_page"),
         data={
-            "minutes_between_check": 1566,
-            'fetch_backend': "html_requests"
+            "requests-time_between_check-minutes": 1566,
+            'application-fetch_backend': "html_requests"
         },
         follow_redirects=True
     )
@@ -88,8 +94,8 @@ def test_check_recheck_global_setting(client, live_server):
     res = client.post(
         url_for("settings_page"),
         data={
-            "minutes_between_check": 222,
-            'fetch_backend': "html_requests"
+            "requests-time_between_check-minutes": 222,
+            'application-fetch_backend': "html_requests"
         },
         follow_redirects=True
     )
@@ -108,7 +114,7 @@ def test_check_recheck_global_setting(client, live_server):
     res = client.post(
         url_for("edit_page", uuid="first"),
         data={"url": test_url,
-              "minutes_between_check": 55,
+              "time_between_check-minutes": 55,
               'fetch_backend': "html_requests"
               },
         follow_redirects=True
@@ -124,8 +130,8 @@ def test_check_recheck_global_setting(client, live_server):
     res = client.post(
         url_for("settings_page"),
         data={
-            "minutes_between_check": 666,
-            'fetch_backend': "html_requests"
+            "requests-time_between_check-minutes": 666,
+            "application-fetch_backend": "html_requests"
         },
         follow_redirects=True
     )
@@ -134,7 +140,7 @@ def test_check_recheck_global_setting(client, live_server):
     res = client.post(
         url_for("edit_page", uuid="first"),
         data={"url": test_url,
-              "minutes_between_check": "",
+              "time_between_check-minutes": "",
               'fetch_backend': "html_requests"
               },
         follow_redirects=True
@@ -147,4 +153,3 @@ def test_check_recheck_global_setting(client, live_server):
         follow_redirects=True
     )
     assert b"666" in res.data
-
changedetectionio/tests/test_xpath_selector.py (new file, 161 lines)
@@ -0,0 +1,161 @@
#!/usr/bin/python3

import time
from flask import url_for
from .util import live_server_setup

from ..html_tools import *


def test_setup(live_server):
    live_server_setup(live_server)


def set_original_response():
    test_return_data = """<html>
     <body>
     Some initial text</br>
     <p>Which is across multiple lines</p>
     </br>
     So let's see what happens. </br>
     <div class="sametext">Some text thats the same</div>
     <div class="changetext">Some text that will change</div>
     </body>
     </html>
    """

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(test_return_data)
    return None


def set_modified_response():
    test_return_data = """<html>
     <body>
     Some initial text</br>
     <p>Which is across multiple lines</p>
     </br>
     So let's see what happens. THIS CHANGES AND SHOULDNT TRIGGER A CHANGE</br>
     <div class="sametext">Some text thats the same</div>
     <div class="changetext">Some new text</div>
     </body>
     </html>
    """

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(test_return_data)

    return None


def test_check_markup_xpath_filter_restriction(client, live_server):
    sleep_time_for_fetch_thread = 3

    xpath_filter = "//*[contains(@class, 'sametext')]"

    set_original_response()

    # Give the endpoint time to spin up
    time.sleep(1)

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    # Trigger a check
    client.get(url_for("form_watch_checknow"), follow_redirects=True)

    # Give the thread time to pick it up
    time.sleep(sleep_time_for_fetch_thread)

    # Goto the edit page and set our xpath filter
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"css_filter": xpath_filter, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

    # Give the thread time to pick it up
    time.sleep(sleep_time_for_fetch_thread)

    # View it / reset state back to viewed
    client.get(url_for("diff_history_page", uuid="first"), follow_redirects=True)

    # Make a change
    set_modified_response()

    # Trigger a check
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    # Give the thread time to pick it up
    time.sleep(sleep_time_for_fetch_thread)

    res = client.get(url_for("index"))
    assert b'unviewed' not in res.data


def test_xpath_validation(client, live_server):

    # Give the endpoint time to spin up
    time.sleep(1)

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"css_filter": "/something horrible", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"is not a valid XPath expression" in res.data


# The 'xpath:' prefix is really only used by the distill.io importer, but could be handy too
def test_check_with_prefix_css_filter(client, live_server):
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

    # Give the endpoint time to spin up
    time.sleep(1)

    set_original_response()

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    time.sleep(3)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"css_filter": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Updated watch." in res.data
    time.sleep(3)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"Some text thats the same" in res.data        # in selector
    assert b"Some text that will change" not in res.data  # not in selector

    client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
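For reference, the xpath filter exercised above ("//*[contains(@class, 'sametext')]", with or without the "xpath:" prefix) selects elements whose class attribute contains the given substring. Below is a standalone sketch of the same expression evaluated with lxml directly; this is only an illustration and may not match how the project's html_tools module applies the filter internally.

from lxml import html

page = """<html><body>
<div class="sametext">Some text thats the same</div>
<div class="changetext">Some text that will change</div>
</body></html>"""

tree = html.fromstring(page)
# Same expression as xpath_filter in the tests above
matches = tree.xpath("//*[contains(@class, 'sametext')]")
print([el.text_content() for el in matches])  # ['Some text thats the same']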
changedetectionio/tests/unit/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
"""Unit tests for the app."""
changedetectionio/tests/unit/test-content/README.md (new file, 5 lines)
@@ -0,0 +1,5 @@
# What is this?
This is test content for the Python diff engine. The JS interface is used for the front end (because it lets you explore
differences down to the word level), but at the moment the actual comparison uses the Python difflib engine.

This content (`before.txt` and `after.txt`) is for unit testing.
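As a quick illustration of that difflib-based comparison, here is a minimal sketch that diffs the before.txt and after.txt fixtures shown below (inlined as strings so the snippet is self-contained); it is only an example of difflib usage, not the project's own diff code.

import difflib

before = """After twenty years, as cursed as I may be
for having learned computerese,
I continue to examine bits, bytes and words
ok
and insure that I'm one of those computer nerds.
"""

after = """After twenty years, as cursed as I may be
for having learned computerese,
I continue to examine bits, bytes and words
xok
next-x-ok
and insure that I'm one of those computer nerds.
and something new
"""

# keepends=True preserves newlines so the unified diff lines print cleanly
diff = difflib.unified_diff(before.splitlines(keepends=True),
                            after.splitlines(keepends=True),
                            fromfile='before.txt', tofile='after.txt')
print(''.join(diff))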
changedetectionio/tests/unit/test-content/after-2.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
After twenty years, as cursed as I may be
ok
and insure that I'm one of those computer nerds.
changedetectionio/tests/unit/test-content/after.txt (new file, 7 lines)
@@ -0,0 +1,7 @@
After twenty years, as cursed as I may be
for having learned computerese,
I continue to examine bits, bytes and words
xok
next-x-ok
and insure that I'm one of those computer nerds.
and something new
changedetectionio/tests/unit/test-content/before.txt (new file, 5 lines)
@@ -0,0 +1,5 @@
After twenty years, as cursed as I may be
for having learned computerese,
I continue to examine bits, bytes and words
ok
and insure that I'm one of those computer nerds.