Compare commits
321 Commits
jinja2-url...0.45.6
| SHA1 |
|---|
| 89797dfe02 |
| c905652780 |
| 99246d3e6d |
| f9f69bf0dd |
| 68efb25e9b |
| 70606ab05d |
| d3c8386874 |
| 47103d7f3d |
| 03c671bfff |
| e209d9fba0 |
| 3b43da35ec |
| a0665e1f18 |
| 9ffe7e0eaf |
| 3e5671a3a2 |
| cd1aca9ee3 |
| 6a589e14f3 |
| dbb76f3618 |
| 4ae27af511 |
| e1860549dc |
| 9765d56a23 |
| 349111eb35 |
| 71e50569a0 |
| c372942295 |
| 0aef5483d9 |
| c266c64b94 |
| 32e5498a9d |
| 0ba7928d58 |
| 1709e8f936 |
| b16d65741c |
| 1cadcc6d15 |
| b58d521d19 |
| 52225f2ad8 |
| 7220afab0a |
| 1c0fe4c23e |
| 4f6b0eb8a5 |
| f707c914b6 |
| 9cb636e638 |
| 1d5fe51157 |
| c0b49d3be9 |
| c4dc85525f |
| 26159840c8 |
| 522e9786c6 |
| 9ce86a2835 |
| f9f6300a70 |
| 7734b22a19 |
| da421fe110 |
| 3e2b55a46f |
| 7ace259d70 |
| aa6ad7bf47 |
| 40dd29dbc6 |
| 7debccca73 |
| 59578803bf |
| a5db3a0b99 |
| 49a5337ac4 |
| ceac8c21e4 |
| a7132b1cfc |
| 2b948c15c1 |
| 34f2d30968 |
| 700729a332 |
| b6060ac90c |
| 5cccccb0b6 |
| c52eb512e8 |
| 7282df9c08 |
| e30b17b8bc |
| 1e88136325 |
| 57de4ffe4f |
| 51e2e8a226 |
| 8887459462 |
| 460c724e51 |
| dcf4bf37ed |
| e3cf22fc27 |
| d497db639e |
| 7355ac8d21 |
| 2f2d0ea0f2 |
| a958e1fe20 |
| 5dc3b00ec6 |
| 8ac4757cd9 |
| 2180bb256d |
| 212f15ad5f |
| 22b2068208 |
| 4916043055 |
| 7bf13bad30 |
| 0aa2276afb |
| 3b875e5a6a |
| 8ec50294d2 |
| e3c9255d9e |
| 3b03bdcb82 |
| e25792bcec |
| bf4168a2aa |
| 9d37eaa57b |
| 40d01acde9 |
| d34832de73 |
| ed4bafae63 |
| 3a5bceadfa |
| 6abdf2d332 |
| dee23709a9 |
| 52df3b10e7 |
| 087d21c61e |
| 171faf465c |
| a3d8bd0b1a |
| 6ef8a1c18f |
| 126f0fbf87 |
| cfa712c88c |
| 6a6ba40b6a |
| e7f726c057 |
| df0cc7b585 |
| 76cd98b521 |
| f84ba0fb31 |
| c35cbd33d6 |
| 661f7fe32c |
| 7cb7eebbc5 |
| aaceb4ebad |
| 56cf6e5ea5 |
| 1987e109e8 |
| 20d65cdd26 |
| 37ff5f6d37 |
| 2f777ea3bb |
| e709201955 |
| 572f71299f |
| 5f150c4f03 |
| 8cbf8e8f57 |
| 0e65dda5b6 |
| 72a415144b |
| 52f2c00308 |
| 72311fb845 |
| f1b10a22f8 |
| a4c620c308 |
| 9434eac72d |
| edb5e20de6 |
| e62eeb1c4a |
| a4e6fd1ec3 |
| d8b9f0fd78 |
| f9387522ee |
| ba8d2e0c2d |
| 247db22a33 |
| aeabd5b3fc |
| e9e1ce893f |
| b5a415c7b6 |
| 9e954532d6 |
| 955835df72 |
| 1aeafef910 |
| 1367197df7 |
| 143971123d |
| 04d2d3fb00 |
| 236f0c098d |
| 582c6b465b |
| a021ba87fa |
| e9057cb851 |
| 72ec438caa |
| 367dec48e1 |
| dd87912c88 |
| 0126cb0aac |
| 463b2d0449 |
| e4f6d54ae2 |
| 5f338d7824 |
| 0b563a93ec |
| d939882dde |
| 690cf4acc9 |
| 3cb3c7ba2e |
| 5325918f29 |
| 8eee913438 |
| 06921d973e |
| 316f28a0f2 |
| 3801d339f5 |
| d814535dc6 |
| cf3f3e4497 |
| ba76c2a280 |
| 94f38f052e |
| 1710885fc4 |
| 2018e73240 |
| fae8c89a4e |
| 40988c55c6 |
| 5aa713b7ea |
| e1f5dfb703 |
| 966600d28e |
| e7ac356d99 |
| e874df4ffc |
| d1f44d0345 |
| 8536af0845 |
| 9076ba6bd3 |
| 43af18e2bc |
| ad75e8cdd0 |
| f604643356 |
| d5fd22f693 |
| 1d9d11b3f5 |
| f49464f451 |
| bc6bde4062 |
| 2863167f45 |
| ce3966c104 |
| d5f574ca17 |
| c96ece170a |
| 1fb90bbddc |
| 55b6ae86e8 |
| 66b892f770 |
| 3b80bb2f0e |
| e6d2d87b31 |
| 6e71088cde |
| 2bc988dffc |
| a578de36c5 |
| 4c74d39df0 |
| c454cbb808 |
| 6f1eec0d5a |
| 0d05ee1586 |
| 23476f0e70 |
| cf363971c1 |
| 35409f79bf |
| fc88306805 |
| 8253074d56 |
| 5f9c8db3e1 |
| abf234298c |
| 0e1032a36a |
| 3b96e40464 |
| c747cf7ba8 |
| 3e98c8ae4b |
| aaad71fc19 |
| 78f93113d8 |
| e9e586205a |
| 89f1ba58b6 |
| 6f4fd011e3 |
| 900dc5ee78 |
| 7b8b50138b |
| 01af21f856 |
| f7f4ab314b |
| ce0355c0ad |
| 0f43213d9d |
| 93c57d9fad |
| 3cdd075baf |
| 5c617e8530 |
| 1a48965ba1 |
| 41856c4ed8 |
| 0ed897c50f |
| f8e587c415 |
| d47a25eb6d |
| 9a0792d185 |
| 948ef7ade4 |
| 0ba139f8f9 |
| a9431191fc |
| 774451f256 |
| 04577cbf32 |
| f2864af8f1 |
| 9a36d081c4 |
| 7048a0acbd |
| fba719ab8d |
| 7c5e2d00af |
| 02b8fc0c18 |
| de15dfd80d |
| 024c8d8fd5 |
| fab7d325f7 |
| 58c7cbeac7 |
| ab9efdfd14 |
| 65d5a5d34c |
| 93c157ee7f |
| de85db887c |
| 50805ca38a |
| fc6424c39e |
| f0966eb23a |
| e4fb5ab4da |
| e99f07a51d |
| 08ee223b5f |
| 572f9b8a31 |
| fcfd1b5e10 |
| 0790dd555e |
| 0b20dc7712 |
| 13c4121f52 |
| e8e176f3bd |
| 7a1d2d924e |
| c3731cf055 |
| a287e5a86c |
| 235535c327 |
| 44dc62da2d |
| 0c380c170f |
| b7a2501d64 |
| e970fef991 |
| b76148a0f4 |
| 93cc30437f |
| 6562d6e0d4 |
| 6c217cc3b6 |
| f30cdf0674 |
| 14da0646a7 |
| b413cdecc7 |
| 7bf52d9275 |
| 09e6624afd |
| b58fd995b5 |
| f7bb8a0afa |
| 3e333496c1 |
| ee776a9627 |
| 65db4d68e3 |
| 74d93d10c3 |
| 37aef0530a |
| f86763dc7a |
| 13c25f9b92 |
| 265f622e75 |
| c12db2b725 |
| a048e4a02d |
| 69662ff91c |
| fc94c57d7f |
| 7b94ba6f23 |
| 2345b6b558 |
| b8d5a12ad0 |
| 9e67a572c5 |
| 378d7b7362 |
| d1d4045c49 |
| 77409eeb3a |
| 87726e0bb2 |
| 72222158e9 |
| 1814924c19 |
| 8aae4197d7 |
| 3a8a41a3ff |
| 64caeea491 |
| 3838bff397 |
| 55ea983bda |
| b4d79839bf |
| 0b8c3add34 |
| 51d57f0963 |
| 6d932149e3 |
| 2c764e8f84 |
| 07765b0d38 |
| 7c3faa8e38 |
| 4624974b91 |
| 991841f1f9 |
| e3db324698 |
@@ -1,2 +1,18 @@
.git
.github
changedetectionio/processors/__pycache__
changedetectionio/api/__pycache__
changedetectionio/model/__pycache__
changedetectionio/blueprint/price_data_follower/__pycache__
changedetectionio/blueprint/tags/__pycache__
changedetectionio/blueprint/__pycache__
changedetectionio/blueprint/browser_steps/__pycache__
changedetectionio/fetchers/__pycache__
changedetectionio/tests/visualselector/__pycache__
changedetectionio/tests/restock/__pycache__
changedetectionio/tests/__pycache__
changedetectionio/tests/fetchers/__pycache__
changedetectionio/tests/unit/__pycache__
changedetectionio/tests/proxy_list/__pycache__
changedetectionio/__pycache__
.github/test/Dockerfile-alpine (vendored, 4 changed lines)
@@ -2,7 +2,7 @@
# Test that we can still build on Alpine (musl modified libc https://musl.libc.org/)
# Some packages wont install via pypi because they dont have a wheel available under this architecture.
FROM ghcr.io/linuxserver/baseimage-alpine:3.16
FROM ghcr.io/linuxserver/baseimage-alpine:3.18
ENV PYTHONUNBUFFERED=1
COPY requirements.txt /requirements.txt

@@ -26,6 +26,6 @@ RUN \
py3-pip && \
echo "**** pip3 install test of changedetection.io ****" && \
pip3 install -U pip wheel setuptools && \
pip3 install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.16/ -r /requirements.txt && \
pip3 install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.18/ -r /requirements.txt && \
apk del --purge \
build-dependencies
.github/workflows/codeql-analysis.yml (vendored, 8 changed lines)
@@ -30,11 +30,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v2
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.

@@ -45,7 +45,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1
uses: github/codeql-action/autobuild@v2
# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl

@@ -59,4 +59,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
uses: github/codeql-action/analyze@v2
.github/workflows/containers.yml (vendored, 31 changed lines)
@@ -39,18 +39,17 @@ jobs:
# Or if we are in a tagged release scenario.
if: ${{ github.event.workflow_run.conclusion == 'success' }} || ${{ github.event.release.tag_name }} != ''
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: 3.11
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install flake8 pytest
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
- name: Create release metadata
run: |

@@ -59,27 +58,27 @@ jobs:
echo ${{ github.ref }} > changedetectionio/tag.txt
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:latest
platforms: all
- name: Login to GitHub Container Registry
uses: docker/login-action@v1
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub Container Registry
uses: docker/login-action@v1
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@v3
with:
install: true
version: latest

@@ -89,22 +88,24 @@ jobs:
- name: Build and push :dev
id: docker_build
if: ${{ github.ref }} == "refs/heads/master"
uses: docker/build-push-action@v2
uses: docker/build-push-action@v5
with:
context: ./
file: ./Dockerfile
push: true
tags: |
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:dev,ghcr.io/${{ github.repository }}:dev
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
# Looks like this was disabled
# provenance: false
# A new tagged release is required, which builds :tag and :latest
- name: Build and push :tag
id: docker_build_tag_release
if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.')
uses: docker/build-push-action@v2
uses: docker/build-push-action@v5
with:
context: ./
file: ./Dockerfile

@@ -114,15 +115,17 @@
ghcr.io/dgtlmoon/changedetection.io:${{ github.event.release.tag_name }}
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest
ghcr.io/dgtlmoon/changedetection.io:latest
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
# Looks like this was disabled
# provenance: false
- name: Image digest
run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}
- name: Cache Docker layers
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
.github/workflows/pypi.yml (vendored, 44 changed lines)
@@ -1,44 +0,0 @@
name: PyPi Test and Push tagged release
# Triggers the workflow on push or pull request events
on:
workflow_run:
workflows: ["ChangeDetection.io Test"]
tags: '*.*'
types: [completed]
jobs:
test-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: 3.9
# - name: Install dependencies
# run: |
# python -m pip install --upgrade pip
# pip install flake8 pytest
# if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
# if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
- name: Test that pip builds without error
run: |
pip3 --version
python3 -m pip install wheel
python3 setup.py bdist_wheel
python3 -m pip install dist/changedetection.io-*-none-any.whl --force
changedetection.io -d /tmp -p 10000 &
sleep 3
curl http://127.0.0.1:10000/static/styles/pure-min.css >/dev/null
killall -9 changedetection.io
# https://github.com/docker/build-push-action/blob/master/docs/advanced/test-before-push.md ?
# https://github.com/docker/buildx/issues/59 ? Needs to be one platform?
# https://github.com/docker/buildx/issues/495#issuecomment-918925854
#if: ${{ github.event_name == 'release'}}
.github/workflows/test-container-build.yml (vendored, 20 changed lines)
@@ -10,11 +10,13 @@ on:
paths:
- requirements.txt
- Dockerfile
- .github/workflows/*
pull_request:
paths:
- requirements.txt
- Dockerfile
- .github/workflows/*
# Changes to requirements.txt packages and Dockerfile may or may not always be compatible with arm etc, so worth testing
# @todo: some kind of path filter for requirements.txt and Dockerfile

@@ -22,22 +24,22 @@ jobs:
test-container-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: 3.11
# Just test that the build works, some libraries won't compile on ARM/rPi etc
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:latest
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@v3
with:
install: true
version: latest

@@ -47,7 +49,7 @@ jobs:
# Check we can still build under alpine/musl
- name: Test that the docker containers can build (musl via alpine check)
id: docker_build_musl
uses: docker/build-push-action@v2
uses: docker/build-push-action@v5
with:
context: ./
file: ./.github/test/Dockerfile-alpine

@@ -55,12 +57,12 @@
- name: Test that the docker containers can build
id: docker_build
uses: docker/build-push-action@v2
uses: docker/build-push-action@v5
# https://github.com/docker/build-push-action#customizing
with:
context: ./
file: ./Dockerfile
platforms: linux/arm/v7,linux/arm/v6,linux/amd64,linux/arm64,
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
.github/workflows/test-only.yml (vendored, 95 changed lines)
@@ -7,33 +7,96 @@ jobs:
test-application:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: 3.9
- uses: actions/checkout@v4
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install flake8 pytest
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
# Mainly just for link/flake8
- name: Set up Python 3.11
uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Lint with flake8
run: |
pip3 install flake8
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Unit tests
- name: Spin up ancillary testable services
run: |
python3 -m unittest changedetectionio.tests.unit.test_notification_diff
docker network create changedet-network
- name: Test with pytest
# Selenium+browserless
docker run --network changedet-network -d --hostname selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome:4
docker run --network changedet-network -d --hostname browserless -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.60-chrome-stable
- name: Build changedetection.io container for testing
run: |
# Build a changedetection.io container and start testing inside
docker build . -t test-changedetectionio
# Debug info
docker run test-changedetectionio bash -c 'pip list'
- name: Spin up ancillary SMTP+Echo message test server
run: |
# Each test is totally isolated and performs its own cleanup/reset
cd changedetectionio; ./run_all_tests.sh
# Debug SMTP server/echo message back server
docker run --network changedet-network -d -p 11025:11025 -p 11080:11080 --hostname mailserver test-changedetectionio bash -c 'python changedetectionio/tests/smtp/smtp-test-server.py'
- name: Test built container with pytest
run: |
# Unit tests
docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_notification_diff'
# All tests
docker run --network changedet-network test-changedetectionio bash -c 'cd changedetectionio && ./run_basic_tests.sh'
- name: Test built container selenium+browserless/playwright
run: |
# Selenium fetch
docker run --rm -e "WEBDRIVER_URL=http://selenium:4444/wd/hub" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py'
# Playwright/Browserless fetch
docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py'
# Settings headers playwright tests - Call back in from Browserless, check headers
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "WEBDRIVER_URL=http://selenium:4444/wd/hub" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
# restock detection via playwright - added name=changedet here so that playwright/browserless can connect to it
docker run --rm --name "changedet" -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-port=5004 --live-server-host=0.0.0.0 tests/restock/test_restock.py'
- name: Test SMTP notification mime types
run: |
# SMTP content types - needs the 'Debug SMTP server/echo message back server' container from above
docker run --rm --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/smtp/test_notification_smtp.py'
- name: Test with puppeteer fetcher and disk cache
run: |
docker run --rm -e "PUPPETEER_DISK_CACHE=/tmp/data/" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py'
# Browserless would have had -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" added above
- name: Test proxy interaction
run: |
cd changedetectionio
./run_proxy_tests.sh
# And again with PLAYWRIGHT_DRIVER_URL=..
cd ..
- name: Test changedetection.io container starts+runs basically without error
run: |
docker run -p 5556:5000 -d test-changedetectionio
sleep 3
# Should return 0 (no error) when grep finds it
curl -s http://localhost:5556 |grep -q checkbox-uuid
# and IPv6
curl -s -g -6 "http://[::1]:5556"|grep -q checkbox-uuid
#export WEBDRIVER_URL=http://localhost:4444/wd/hub
#pytest tests/fetchers/test_content.py
#pytest tests/test_errorhandling.py
.github/workflows/test-pip-build.yml (vendored, new file, 36 lines)
@@ -0,0 +1,36 @@
name: ChangeDetection.io PIP package test
# Triggers the workflow on push or pull request events
# This line doesnt work, even tho it is the documented one
on: [push, pull_request]
# Changes to requirements.txt packages and Dockerfile may or may not always be compatible with arm etc, so worth testing
# @todo: some kind of path filter for requirements.txt and Dockerfile
jobs:
test-pip-build-basics:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v4
with:
python-version: 3.11
- name: Test that the basic pip built package runs without error
run: |
set -e
mkdir dist
pip3 install wheel
python3 setup.py bdist_wheel
pip3 install -r requirements.txt
rm ./changedetection.py
rm -rf changedetectio
pip3 install dist/changedetection.io*.whl
changedetection.io -d /tmp -p 10000 &
sleep 3
curl http://127.0.0.1:10000/static/styles/pure-min.css >/dev/null
killall -9 changedetection.io
@@ -7,9 +7,3 @@ Otherwise, it's always best to PR into the `dev` branch.
Please be sure that all new functionality has a matching test!
Use `pytest` to validate/test, you can run the existing tests as `pytest tests/test_notification.py` for example

```
pip3 install -r requirements-dev
```

this is from https://github.com/dgtlmoon/changedetection.io/blob/master/requirements-dev.txt
Dockerfile (32 changed lines)
@@ -1,7 +1,7 @@
# pip dependencies install stage
FROM python:3.8-slim as builder
FROM python:3.11-slim-bookworm as builder
# rustc compiler would be needed on ARM type devices but theres an issue with some deps not building..
# See `cryptography` pin comment in requirements.txt
ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1
RUN apt-get update && apt-get install -y --no-install-recommends \

@@ -20,31 +20,29 @@ WORKDIR /install
COPY requirements.txt /requirements.txt
# Instructing pip to fetch wheels from piwheels.org" on ARMv6 and ARMv7 machines
RUN if [ "$(dpkg --print-architecture)" = "armhf" ] || [ "$(dpkg --print-architecture)" = "armel" ]; then \
printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf; \
fi;
RUN pip install --target=/dependencies -r /requirements.txt
# Playwright is an alternative to Selenium
# Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing
# https://github.com/dgtlmoon/changedetection.io/pull/1067 also musl/alpine (not supported)
RUN pip install --target=/dependencies playwright~=1.27.1 \
RUN pip install --target=/dependencies playwright~=1.39 \
|| echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."
# Final image stage
FROM python:3.8-slim
FROM python:3.11-slim-bookworm
# Actual packages needed at runtime, usually due to the notification (apprise) backend
# rustc compiler would be needed on ARM type devices but theres an issue with some deps not building..
ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1
# Re #93, #73, excluding rustc (adds another 430Mb~)
RUN apt-get update && apt-get install -y --no-install-recommends \
g++ \
gcc \
libc-dev \
libffi-dev \
libjpeg-dev \
libssl-dev \
libxslt-dev \
zlib1g-dev
libxslt1.1 \
# For pdftohtml
poppler-utils \
zlib1g \
&& apt-get clean && rm -rf /var/lib/apt/lists/*
# https://stackoverflow.com/questions/58701233/docker-logs-erroneously-appears-empty-until-container-stops
ENV PYTHONUNBUFFERED=1
MANIFEST.in (11 changed lines)
@@ -1,9 +1,11 @@
recursive-include changedetectionio/api *
recursive-include changedetectionio/templates *
recursive-include changedetectionio/static *
recursive-include changedetectionio/blueprint *
recursive-include changedetectionio/model *
recursive-include changedetectionio/tests *
recursive-include changedetectionio/processors *
recursive-include changedetectionio/res *
recursive-include changedetectionio/static *
recursive-include changedetectionio/templates *
recursive-include changedetectionio/tests *
prune changedetectionio/static/package-lock.json
prune changedetectionio/static/styles/node_modules
prune changedetectionio/static/styles/package-lock.json

@@ -11,3 +13,6 @@ include changedetection.py
global-exclude *.pyc
global-exclude node_modules
global-exclude venv
global-exclude test-datastore
global-exclude changedetection.io*dist-info
@@ -2,19 +2,44 @@
Live your data-life pro-actively, track website content changes and receive notifications via Discord, Email, Slack, Telegram and 70+ more
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start?src=pip)
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring, list of websites with changes" title="Self-hosted web page change monitoring, list of websites with changes" />](https://changedetection.io)
[**Don't have time? Let us host it for you! try our extremely affordable subscription use our proxies and support!**](https://lemonade.changedetection.io/start)
[**Don't have time? Let us host it for you! try our extremely affordable subscription use our proxies and support!**](https://changedetection.io)
#### Example use cases
### Target specific parts of the webpage using the Visual Selector tool.
Available when connected to a <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Playwright-content-fetcher">playwright content fetcher</a> (included as part of our subscription service)
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/visualselector-anim.gif" style="max-width:100%;" alt="Select parts and elements of a web page to monitor for changes" title="Select parts and elements of a web page to monitor for changes" />](https://changedetection.io?src=pip)
### Easily see what changed, examine by word, line, or individual character.
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />](https://changedetection.io?src=pip)
### Perform interactive browser steps
Fill in text boxes, click buttons and more, setup your changedetection scenario.
Using the **Browser Steps** configuration, add basic steps before performing change detection, such as logging into websites, adding a product to a cart, accept cookie logins, entering dates and refining searches.
[<img src="docs/browsersteps-anim.gif" style="max-width:100%;" alt="Website change detection with interactive browser steps, detect changes behind login and password, search queries and more" title="Website change detection with interactive browser steps, detect changes behind login and password, search queries and more" />](https://changedetection.io?src=pip)
After **Browser Steps** have been run, then visit the **Visual Selector** tab to refine the content you're interested in.
Requires Playwright to be enabled.
### Example use cases
- Products and services have a change in pricing
- _Out of stock notification_ and _Back In stock notification_
- Monitor and track PDF file changes, know when a PDF file has text changes.
- Governmental department updates (changes are often only on their websites)
- New software releases, security advisories when you're not on their mailing list.
- Festivals with changes
- Discogs restock alerts and monitoring
- Realestate listing changes
- Know when your favourite whiskey is on sale, or other special deals are announced before anyone else
- COVID related news from government websites

@@ -27,18 +52,34 @@ Live your data-life pro-actively, track website content changes and receive noti
- Create RSS feeds based on changes in web content
- Monitor HTML source code for unexpected changes, strengthen your PCI compliance
- You have a very sensitive list of URLs to watch and you do _not_ want to use the paid alternatives. (Remember, _you_ are the product)
- Get notified when certain keywords appear in Twitter search results
- Proactively search for jobs, get notified when companies update their careers page, search job portals for keywords.
- Get alerts when new job positions are open on Bamboo HR and other job platforms
- Website defacement monitoring
- Pokémon Card Restock Tracker / Pokémon TCG Tracker
- RegTech - stay ahead of regulatory changes, regulatory compliance
_Need an actual Chrome runner with Javascript support? We support fetching via WebDriver and Playwright!</a>_
#### Key Features
- Lots of trigger filters, such as "Trigger on text", "Remove text by selector", "Ignore text", "Extract text", also using regular-expressions!
- Target elements with xPath and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
- Target elements with xPath(1.0) and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
- Switch between fast non-JS and Chrome JS based "fetchers"
- Track changes in PDF files (Monitor text changed in the PDF, Also monitor PDF filesize and checksums)
- Easily specify how often a site should be checked
- Execute JS before extracting text (Good for logging in, see examples in the UI!)
- Override Request Headers, Specify `POST` or `GET` and other methods
- Use the "Visual Selector" to help target specific elements
- Configurable [proxy per watch](https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration)
- Send a screenshot with the notification when a change is detected in the web page
We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $100 using our signup link.
[Oxylabs](https://oxylabs.go2cloud.org/SH2d) is also an excellent proxy provider and well worth using, they offer Residental, ISP, Rotating and many other proxy types to suit your project.
Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/
```bash

@@ -54,5 +95,5 @@ $ changedetection.io -d /path/to/empty/data/dir -p 5000
Then visit http://127.0.0.1:5000 , You should now be able to access the UI.
See https://github.com/dgtlmoon/changedetection.io for more information.
See https://changedetection.io for more information.
README.md (112 changed lines)
@@ -1,33 +1,55 @@
## Web Site Change Detection, Monitoring and Notification.
## Web Site Change Detection, Restock monitoring and notifications.
_Live your data-life pro-actively, Detect website changes and perform meaningful actions, trigger notifications via Discord, Email, Slack, Telegram, API calls and many more._
**_Detect website content changes and perform meaningful actions - trigger notifications via Discord, Email, Slack, Telegram, API calls and many more._**
_Live your data-life pro-actively._
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start?src=github)
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web site page change monitoring" title="Self-hosted web site page change monitoring" />](https://changedetection.io?src=github)
[![Release Version][release-shield]][release-link] [![Docker Pulls][docker-pulls]][docker-link] [![License][license-shield]](LICENSE.md)

[**Don't have time? Let us host it for you! try our $6.99/month subscription - use our proxies and support!**](https://lemonade.changedetection.io/start) , _half the price of other website change monitoring services and comes with unlimited watches & checks!_
[**Don't have time? Let us host it for you! try our $8.99/month subscription - use our proxies and support!**](https://changedetection.io) , _half the price of other website change monitoring services!_
- Chrome browser included.
- Super fast, no registration needed setup.
- Start watching and receiving change notifications instantly.
- Get started watching and receiving website change notifications straight away.
Easily see what changed, examine by word, line, or individual character.
### Target specific parts of the webpage using the Visual Selector tool.
<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />
Available when connected to a <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Playwright-content-fetcher">playwright content fetcher</a> (included as part of our subscription service)
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/visualselector-anim.gif" style="max-width:100%;" alt="Select parts and elements of a web page to monitor for changes" title="Select parts and elements of a web page to monitor for changes" />](https://changedetection.io?src=github)
### Easily see what changed, examine by word, line, or individual character.
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />](https://changedetection.io?src=github)
#### Example use cases
### Perform interactive browser steps
Fill in text boxes, click buttons and more, setup your changedetection scenario.
Using the **Browser Steps** configuration, add basic steps before performing change detection, such as logging into websites, adding a product to a cart, accept cookie logins, entering dates and refining searches.
[<img src="docs/browsersteps-anim.gif" style="max-width:100%;" alt="Website change detection with interactive browser steps, detect changes behind login and password, search queries and more" title="Website change detection with interactive browser steps, detect changes behind login and password, search queries and more" />](https://changedetection.io?src=github)
After **Browser Steps** have been run, then visit the **Visual Selector** tab to refine the content you're interested in.
Requires Playwright to be enabled.
### Example use cases
- Products and services have a change in pricing
- _Out of stock notification_ and _Back In stock notification_
- Monitor and track PDF file changes, know when a PDF file has text changes.
- Governmental department updates (changes are often only on their websites)
- New software releases, security advisories when you're not on their mailing list.
- Festivals with changes
- Discogs restock alerts and monitoring
- Realestate listing changes
- Know when your favourite whiskey is on sale, or other special deals are announced before anyone else
- COVID related news from government websites

@@ -42,14 +64,19 @@ Easily see what changed, examine by word, line, or individual character.
- You have a very sensitive list of URLs to watch and you do _not_ want to use the paid alternatives. (Remember, _you_ are the product)
- Get notified when certain keywords appear in Twitter search results
- Proactively search for jobs, get notified when companies update their careers page, search job portals for keywords.
- Get alerts when new job positions are open on Bamboo HR and other job platforms
- Website defacement monitoring
- Pokémon Card Restock Tracker / Pokémon TCG Tracker
- RegTech - stay ahead of regulatory changes, regulatory compliance
_Need an actual Chrome runner with Javascript support? We support fetching via WebDriver and Playwright!</a>_
#### Key Features
- Lots of trigger filters, such as "Trigger on text", "Remove text by selector", "Ignore text", "Extract text", also using regular-expressions!
- Target elements with xPath and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
- Target elements with xPath(1.0) and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
- Switch between fast non-JS and Chrome JS based "fetchers"
- Track changes in PDF files (Monitor text changed in the PDF, Also monitor PDF filesize and checksums)
- Easily specify how often a site should be checked
- Execute JS before extracting text (Good for logging in, see examples in the UI!)
- Override Request Headers, Specify `POST` or `GET` and other methods

@@ -59,27 +86,10 @@ _Need an actual Chrome runner with Javascript support? We support fetching via W
We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $100 using our signup link.
## Screenshots
[Oxylabs](https://oxylabs.go2cloud.org/SH2d) is also an excellent proxy provider and well worth using, they offer Residental, ISP, Rotating and many other proxy types to suit your project.
Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/
### Target specific parts of the webpage using the Visual Selector tool.
Available when connected to a <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Playwright-content-fetcher">playwright content fetcher</a> (included as part of our subscription service)
<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/visualselector-anim.gif" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />
### Perform interactive browser steps
Fill in text boxes, click buttons and more, setup your changedetection scenario.
Using the **Browser Steps** configuration, add basic steps before performing change detection, such as logging into websites, adding a product to a cart, accept cookie logins, entering dates and refining searches.
<img src="docs/browsersteps-anim.gif" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Website change detection with interactive browser steps, login, cookies etc" />
After **Browser Steps** have been run, then visit the **Visual Selector** tab to refine the content you're interested in.
Requires Playwright to be enabled.
## Installation
### Docker

@@ -97,6 +107,8 @@ $ docker run -d --restart always -p "127.0.0.1:5000:5000" -v datastore-volume:/d
`:latest` tag is our latest stable release, `:dev` tag is our bleeding edge `master` branch.
Alternative docker repository over at ghcr - [ghcr.io/dgtlmoon/changedetection.io](https://ghcr.io/dgtlmoon/changedetection.io)
### Windows
See the install instructions at the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Microsoft-Windows

@@ -135,8 +147,8 @@ See the wiki for more information https://github.com/dgtlmoon/changedetection.io
## Filters
XPath, JSONPath, jq, and CSS support comes baked in! You can be as specific as you need, use XPath exported from various XPath element query creation tools.
(We support LXML `re:test`, `re:math` and `re:replace`.)
XPath(1.0), JSONPath, jq, and CSS support comes baked in! You can be as specific as you need, use XPath exported from various XPath element query creation tools.
(We support LXML `re:test`, `re:match` and `re:replace`.)
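As an aside to the filter support described above, the `re:test` function comes from lxml's EXSLT regular-expression extensions. A minimal stand-alone sketch (not part of the README diff); the HTML sample and the selector are invented for illustration only:

```python
# Minimal illustration of an XPath 1.0 filter using the EXSLT regexp extension via lxml.
# The sample document and selector are made up for this example.
from lxml import html

doc = html.fromstring("""
<div>
  <span class="price">$ 23.50</span>
  <span class="price">sold out</span>
</div>
""")

# Keep only <span class="price"> elements whose text looks like a dollar amount
selector = '//span[@class="price"][re:test(text(), "\\$\\s*[0-9]+(\\.[0-9]+)?")]'
matches = doc.xpath(selector, namespaces={'re': 'http://exslt.org/regular-expressions'})
print([e.text for e in matches])  # ['$ 23.50']
```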
## Notifications

@@ -160,7 +172,7 @@ Just some examples
<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-notifications.png" style="max-width:100%;" alt="Self-hosted web page change monitoring notifications" title="Self-hosted web page change monitoring notifications" />
Now you can also customise your notification content!
Now you can also customise your notification content and use <a target="_new" href="https://jinja.palletsprojects.com/en/3.0.x/templates/">Jinja2 templating</a> for their title and body!
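To illustrate what Jinja2 templating makes possible in a notification body, here is a small stand-alone sketch. The variable names (`watch_url`, `diff`) only mimic the style of the tokens the application exposes; the exact token names and the rendering call used internally are not shown in this diff, so treat this purely as a Jinja2 usage example:

```python
# Stand-alone Jinja2 example; variable names are illustrative only, not
# necessarily the exact tokens changedetection.io passes to its templates.
from jinja2 import Environment, BaseLoader

notification_body = """Change detected in {{ watch_url }}
{% if diff %}What changed:
{{ diff }}{% else %}(no diff available){% endif %}"""

template = Environment(loader=BaseLoader()).from_string(notification_body)
print(template.render(watch_url="https://example.com/pricing", diff="- $6.99\n+ $8.99"))
```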
## JSON API Monitoring

@@ -174,7 +186,7 @@ This will re-parse the JSON and apply formatting to the text, making it super ea
### JSONPath or jq?
For more complex parsing, filtering, and modifying of JSON data, jq is recommended due to the built-in operators and functions. Refer to the [documentation](https://stedolan.github.io/jq/manual/) for more specifc information on jq.
For more complex parsing, filtering, and modifying of JSON data, jq is recommended due to the built-in operators and functions. Refer to the [documentation](https://stedolan.github.io/jq/manual/) for more specific information on jq.
One big advantage of `jq` is that you can use logic in your JSON filter, such as filters to only show items that have a value greater than/less than etc.

@@ -188,27 +200,55 @@ When you enable a `json:` or `jq:` filter, you can even automatically extract an
<html>
...
<script type="application/ld+json">
{"@context":"http://schema.org","@type":"Product","name":"Nan Optipro Stage 1 Baby Formula 800g","price": 23.50 }
{
"@context":"http://schema.org/",
"@type":"Product",
"offers":{
"@type":"Offer",
"availability":"http://schema.org/InStock",
"price":"3949.99",
"priceCurrency":"USD",
"url":"https://www.newegg.com/p/3D5-000D-001T1"
},
"description":"Cobratype King Cobra Hero Desktop Gaming PC",
"name":"Cobratype King Cobra Hero Desktop Gaming PC",
"sku":"3D5-000D-001T1",
"itemCondition":"NewCondition"
}
</script>
```
`json:$.price` or `jq:.price` would give `23.50`, or you can extract the whole structure
`json:$..price` or `jq:..price` would give `3949.99`, or you can extract the whole structure (use a JSONpath test website to validate with)
The application also supports notifying you that it can follow this information automatically
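As a quick stand-alone sketch of what a `$..price`-style extraction boils down to, here is the same ld+json data reached with only the Python standard library. The `json:`/`jq:` filter syntax itself is handled inside changedetection.io; this only demonstrates the value being located:

```python
# Walk a parsed ld+json document and collect every "price" value, roughly what a
# recursive JSONPath such as $..price resolves to.
import json

ldjson = json.loads("""
{"@context":"http://schema.org/","@type":"Product",
 "offers":{"@type":"Offer","availability":"http://schema.org/InStock",
           "price":"3949.99","priceCurrency":"USD"},
 "name":"Cobratype King Cobra Hero Desktop Gaming PC"}
""")

def find_prices(node):
    if isinstance(node, dict):
        for key, value in node.items():
            if key == "price":
                yield value
            yield from find_prices(value)
    elif isinstance(node, list):
        for item in node:
            yield from find_prices(item)

print(list(find_prices(ldjson)))  # ['3949.99']
```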
## Proxy Configuration
See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration , we also support using [BrightData proxy services where possible]( https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support)
See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration , we also support using [Bright Data proxy services where possible](https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support) and [Oxylabs](https://oxylabs.go2cloud.org/SH2d) proxy services.
## Raspberry Pi support?
Raspberry Pi and linux/arm/v6 linux/arm/v7 arm64 devices are supported! See the wiki for [details](https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver)
## Import support
Easily [import your list of websites to watch for changes in Excel .xslx file format](https://changedetection.io/tutorial/how-import-your-website-change-detection-lists-excel), or paste in lists of website URLs as plaintext.
Excel import is recommended - that way you can better organise tags/groups of websites and other features.
## API Support
Supports managing the website watch list [via our API](https://changedetection.io/docs/api_v1/index.html)
## Support us
Do you use changedetection.io to make money? does it save you time or money? Does it make your life easier? less stressful? Remember, we write this software when we should be doing actual paid work, we have to buy food and pay rent just like you.
Firstly, consider taking out a [change detection monthly subscription - unlimited checks and watches](https://lemonade.changedetection.io/start) , even if you don't use it, you still get the warm fuzzy feeling of helping out the project. (And who knows, you might just use it!)
Firstly, consider taking out a [change detection monthly subscription - unlimited checks and watches](https://changedetection.io?src=github) , even if you don't use it, you still get the warm fuzzy feeling of helping out the project. (And who knows, you might just use it!)
Or directly donate an amount PayPal [](https://www.paypal.com/donate/?hosted_button_id=7CP6HR9ZCNDYJ)

@@ -226,5 +266,5 @@ I offer commercial support, this software is depended on by network security, ae
[test-shield]: https://github.com/dgtlmoon/changedetection.io/actions/workflows/test-only.yml/badge.svg?branch=master
[license-shield]: https://img.shields.io/github/license/dgtlmoon/changedetection.io.svg?style=for-the-badge
[release-link]: https://github.com/dgtlmoon.com/changedetection.io/releases
[release-link]: https://github.com/dgtlmoon/changedetection.io/releases
[docker-link]: https://hub.docker.com/r/dgtlmoon/changedetection.io
@@ -7,7 +7,7 @@
from changedetectionio import changedetection
import multiprocessing
import signal
import sys
import os

def sigchld_handler(_signo, _stack_frame):

@@ -35,6 +35,9 @@ if __name__ == '__main__':
    try:
        while True:
            time.sleep(1)
            if not parse_process.is_alive():
                # Process died/crashed for some reason, exit with error set
                sys.exit(1)

    except KeyboardInterrupt:
        #parse_process.terminate() not needed, because this process will issue it to the sub-process anyway
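The hunk above adds a liveness check on the worker process so the parent exits non-zero if the child dies. A minimal stand-alone sketch of that parent/child pattern follows; names such as `worker` are illustrative and not the application's own:

```python
# Minimal sketch of the supervision pattern above: poll the child process and
# exit non-zero if it dies, so a container supervisor can restart the service.
import multiprocessing
import sys
import time

def worker():
    time.sleep(600)  # stand-in for the real application loop

if __name__ == '__main__':
    parse_process = multiprocessing.Process(target=worker)
    parse_process.start()
    try:
        while True:
            time.sleep(1)
            if not parse_process.is_alive():
                # Child died/crashed, propagate an error exit code
                sys.exit(1)
    except KeyboardInterrupt:
        parse_process.terminate()
```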
changedetectionio/api/api_schema.py (new file, 117 lines)
@@ -0,0 +1,117 @@
# Responsible for building the storage dict into a set of rules ("JSON Schema") acceptable via the API
# Probably other ways to solve this when the backend switches to some ORM

def build_time_between_check_json_schema():
    # Setup time between check schema
    schema_properties_time_between_check = {
        "type": "object",
        "additionalProperties": False,
        "properties": {}
    }
    for p in ['weeks', 'days', 'hours', 'minutes', 'seconds']:
        schema_properties_time_between_check['properties'][p] = {
            "anyOf": [
                {
                    "type": "integer"
                },
                {
                    "type": "null"
                }
            ]
        }

    return schema_properties_time_between_check

def build_watch_json_schema(d):
    # Base JSON schema
    schema = {
        'type': 'object',
        'properties': {},
    }

    for k, v in d.items():
        # @todo 'integer' is not covered here because its almost always for internal usage

        if isinstance(v, type(None)):
            schema['properties'][k] = {
                "anyOf": [
                    {"type": "null"},
                ]
            }
        elif isinstance(v, list):
            schema['properties'][k] = {
                "anyOf": [
                    {"type": "array",
                     # Always is an array of strings, like text or regex or something
                     "items": {
                         "type": "string",
                         "maxLength": 5000
                     }
                     },
                ]
            }
        elif isinstance(v, bool):
            schema['properties'][k] = {
                "anyOf": [
                    {"type": "boolean"},
                ]
            }
        elif isinstance(v, str):
            schema['properties'][k] = {
                "anyOf": [
                    {"type": "string",
                     "maxLength": 5000},
                ]
            }

    # Can also be a string (or None by default above)
    for v in ['body',
              'notification_body',
              'notification_format',
              'notification_title',
              'proxy',
              'tag',
              'title',
              'webdriver_js_execute_code'
              ]:
        schema['properties'][v]['anyOf'].append({'type': 'string', "maxLength": 5000})

    # None or Boolean
    schema['properties']['track_ldjson_price_data']['anyOf'].append({'type': 'boolean'})

    schema['properties']['method'] = {"type": "string",
                                      "enum": ["GET", "POST", "DELETE", "PUT"]
                                      }

    schema['properties']['fetch_backend']['anyOf'].append({"type": "string",
                                                           "enum": ["html_requests", "html_webdriver"]
                                                           })

    # All headers must be key/value type dict
    schema['properties']['headers'] = {
        "type": "object",
        "patternProperties": {
            # Should always be a string:string type value
            ".*": {"type": "string"},
        }
    }

    from changedetectionio.notification import valid_notification_formats

    schema['properties']['notification_format'] = {'type': 'string',
                                                   'enum': list(valid_notification_formats.keys())
                                                   }

    # Stuff that shouldn't be available but is just state-storage
    for v in ['previous_md5', 'last_error', 'has_ldjson_price_data', 'previous_md5_before_filters', 'uuid']:
        del schema['properties'][v]

    schema['properties']['webdriver_delay']['anyOf'].append({'type': 'integer'})

    schema['properties']['time_between_check'] = build_time_between_check_json_schema()

    # headers ?
    return schema
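For context, a hedged sketch of how a schema shaped like the output of `build_watch_json_schema()` could be exercised with the `jsonschema` package. In the application itself validation happens through `flask_expects_json` (see the `@expects_json` decorators in the next file), so this is only an approximation for local experimentation, and the sample schema and watch dict are hand-written guesses rather than the real Watch model:

```python
# Illustrative only: validate an invented watch dict against a schema shaped like
# the one build_watch_json_schema() returns. Field names here are assumptions and
# may not match the real Watch base_config exactly.
from jsonschema import validate, ValidationError

schema = {
    "type": "object",
    "additionalProperties": False,
    "required": ["url"],
    "properties": {
        "url":    {"anyOf": [{"type": "string", "maxLength": 5000}]},
        "title":  {"anyOf": [{"type": "null"}, {"type": "string", "maxLength": 5000}]},
        "paused": {"anyOf": [{"type": "boolean"}]},
        "method": {"type": "string", "enum": ["GET", "POST", "DELETE", "PUT"]},
    },
}

try:
    validate(instance={"url": "https://example.com", "method": "GET"}, schema=schema)
    print("valid")
except ValidationError as e:
    print("invalid:", e.message)
```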
@@ -1,11 +1,27 @@
|
||||
import os
|
||||
from distutils.util import strtobool
|
||||
|
||||
from flask_expects_json import expects_json
|
||||
from changedetectionio import queuedWatchMetaData
|
||||
from flask_restful import abort, Resource
|
||||
from flask import request, make_response
|
||||
import validators
|
||||
from . import auth
|
||||
import copy
|
||||
|
||||
# See docs/README.md for rebuilding the docs/apidoc information
|
||||
|
||||
from . import api_schema
|
||||
|
||||
# https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
|
||||
# Build a JSON Schema atleast partially based on our Watch model
|
||||
from changedetectionio.model.Watch import base_config as watch_base_config
|
||||
schema = api_schema.build_watch_json_schema(watch_base_config)
|
||||
|
||||
schema_create_watch = copy.deepcopy(schema)
|
||||
schema_create_watch['required'] = ['url']
|
||||
|
||||
schema_update_watch = copy.deepcopy(schema)
|
||||
schema_update_watch['additionalProperties'] = False
|
||||
|
||||
class Watch(Resource):
|
||||
def __init__(self, **kwargs):
|
||||
@@ -15,30 +31,100 @@ class Watch(Resource):
|
||||
|
||||
# Get information about a single watch, excluding the history list (can be large)
|
||||
# curl http://localhost:4000/api/v1/watch/<string:uuid>
|
||||
# @todo - version2 - ?muted and ?paused should be able to be called together, return the watch struct not "OK"
|
||||
# ?recheck=true
|
||||
@auth.check_token
|
||||
def get(self, uuid):
|
||||
"""
|
||||
@api {get} /api/v1/watch/:uuid Single watch - get data, recheck, pause, mute.
|
||||
@apiDescription Retrieve watch information and set muted/paused status
|
||||
@apiExample {curl} Example usage:
|
||||
curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -H"x-api-key:813031b16330fe25e3780cf0325daa45"
|
||||
curl "http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091?muted=unmuted" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
|
||||
curl "http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091?paused=unpaused" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
|
||||
@apiName Watch
|
||||
@apiGroup Watch
|
||||
@apiParam {uuid} uuid Watch unique ID.
|
||||
@apiQuery {Boolean} [recheck] Recheck this watch `recheck=1`
|
||||
@apiQuery {String} [paused] =`paused` or =`unpaused` , Sets the PAUSED state
|
||||
@apiQuery {String} [muted] =`muted` or =`unmuted` , Sets the MUTE NOTIFICATIONS state
|
||||
@apiSuccess (200) {String} OK When paused/muted/recheck operation OR full JSON object of the watch
|
||||
@apiSuccess (200) {JSON} WatchJSON JSON Full JSON object of the watch
|
||||
"""
|
||||
from copy import deepcopy
|
||||
watch = deepcopy(self.datastore.data['watching'].get(uuid))
|
||||
if not watch:
|
||||
abort(404, message='No watch exists with the UUID of {}'.format(uuid))
|
||||
|
||||
if request.args.get('recheck'):
|
||||
self.update_q.put((1, uuid))
|
||||
self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
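# The bare (1, uuid) tuple shown just above is the old queue format; the wrapped item is its
# replacement. A minimal sketch of what such a wrapper typically looks like -- an assumption for
# illustration, not copied from changedetectionio/queuedWatchMetaData.py:
#
#   from dataclasses import dataclass, field
#
#   @dataclass(order=True)
#   class PrioritizedItem:
#       priority: int
#       item: dict = field(compare=False)  # payload excluded from PriorityQueue ordering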
|
||||
return "OK", 200
|
||||
if request.args.get('paused', '') == 'paused':
|
||||
self.datastore.data['watching'].get(uuid).pause()
|
||||
return "OK", 200
|
||||
elif request.args.get('paused', '') == 'unpaused':
|
||||
self.datastore.data['watching'].get(uuid).unpause()
|
||||
return "OK", 200
|
||||
if request.args.get('muted', '') == 'muted':
|
||||
self.datastore.data['watching'].get(uuid).mute()
|
||||
return "OK", 200
|
||||
elif request.args.get('muted', '') == 'unmuted':
|
||||
self.datastore.data['watching'].get(uuid).unmute()
|
||||
return "OK", 200
|
||||
|
||||
# Return without history, get that via another API call
|
||||
# Properties are not returned as a JSON, so add the required props manually
|
||||
watch['history_n'] = watch.history_n
|
||||
watch['last_changed'] = watch.last_changed
|
||||
|
||||
return watch
|
||||
|
||||
@auth.check_token
|
||||
def delete(self, uuid):
|
||||
"""
|
||||
@api {delete} /api/v1/watch/:uuid Delete a watch and related history
|
||||
@apiExample {curl} Example usage:
|
||||
curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X DELETE -H"x-api-key:813031b16330fe25e3780cf0325daa45"
|
||||
@apiParam {uuid} uuid Watch unique ID.
|
||||
@apiName Delete
|
||||
@apiGroup Watch
|
||||
@apiSuccess (200) {String} OK Was deleted
|
||||
"""
|
||||
if not self.datastore.data['watching'].get(uuid):
|
||||
abort(400, message='No watch exists with the UUID of {}'.format(uuid))
|
||||
|
||||
self.datastore.delete(uuid)
|
||||
return 'OK', 204
|
||||
|
||||
@auth.check_token
|
||||
@expects_json(schema_update_watch)
|
||||
def put(self, uuid):
|
||||
"""
|
||||
@api {put} /api/v1/watch/:uuid Update watch information
|
||||
@apiExample {curl} Example usage:
|
||||
Update (PUT)
|
||||
curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X PUT -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"url": "https://my-nice.com" , "tag": "new list"}'
|
||||
|
||||
@apiDescription Updates an existing watch using JSON, accepts the same structure as returned in <a href="#api-Watch-Watch">get single watch information</a>
|
||||
@apiParam {uuid} uuid Watch unique ID.
|
||||
@apiName Update a watch
|
||||
@apiGroup Watch
|
||||
@apiSuccess (200) {String} OK Was updated
|
||||
@apiSuccess (500) {String} ERR Some other error
|
||||
"""
|
||||
watch = self.datastore.data['watching'].get(uuid)
|
||||
if not watch:
|
||||
abort(404, message='No watch exists with the UUID of {}'.format(uuid))
|
||||
|
||||
if request.json.get('proxy'):
|
||||
plist = self.datastore.proxy_list
|
||||
if not request.json.get('proxy') in plist:
|
||||
return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400
|
||||
|
||||
watch.update(request.json)
|
||||
|
||||
return "OK", 200
|
||||
|
||||
|
||||
class WatchHistory(Resource):
|
||||
def __init__(self, **kwargs):
|
||||
@@ -48,6 +134,21 @@ class WatchHistory(Resource):
|
||||
# Get a list of available history for a watch by UUID
|
||||
# curl http://localhost:4000/api/v1/watch/<string:uuid>/history
|
||||
def get(self, uuid):
|
||||
"""
|
||||
@api {get} /api/v1/watch/<string:uuid>/history Get a list of all historical snapshots available for a watch
|
||||
@apiDescription Requires `uuid`, returns list
|
||||
@apiExample {curl} Example usage:
|
||||
curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json"
|
||||
{
|
||||
"1676649279": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/cb7e9be8258368262246910e6a2a4c30.txt",
|
||||
"1677092785": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/e20db368d6fc633e34f559ff67bb4044.txt",
|
||||
"1677103794": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/02efdd37dacdae96554a8cc85dc9c945.txt"
|
||||
}
|
||||
@apiName Get list of available stored snapshots for watch
|
||||
@apiGroup Watch History
|
||||
@apiSuccess (200) {String} OK
|
||||
@apiSuccess (404) {String} ERR Not found
|
||||
"""
|
||||
watch = self.datastore.data['watching'].get(uuid)
|
||||
if not watch:
|
||||
abort(404, message='No watch exists with the UUID of {}'.format(uuid))
|
||||
@@ -59,11 +160,18 @@ class WatchSingleHistory(Resource):
|
||||
# datastore is a black box dependency
|
||||
self.datastore = kwargs['datastore']
|
||||
|
||||
# Read a given history snapshot and return its content
|
||||
# <string:timestamp> or "latest"
|
||||
# curl http://localhost:4000/api/v1/watch/<string:uuid>/history/<int:timestamp>
|
||||
@auth.check_token
|
||||
def get(self, uuid, timestamp):
|
||||
"""
|
||||
@api {get} /api/v1/watch/<string:uuid>/history/<int:timestamp> Get single snapshot from watch
|
||||
@apiDescription Requires watch `uuid` and `timestamp`. `timestamp` of "`latest`" for latest available snapshot, or <a href="#api-Watch_History-Get_list_of_available_stored_snapshots_for_watch">use the list returned here</a>
|
||||
@apiExample {curl} Example usage:
|
||||
curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history/1677092977 -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json"
|
||||
@apiName Get single snapshot content
|
||||
@apiGroup Watch History
|
||||
@apiSuccess (200) {String} OK
|
||||
@apiSuccess (404) {String} ERR Not found
|
||||
"""
|
||||
watch = self.datastore.data['watching'].get(uuid)
|
||||
if not watch:
|
||||
abort(404, message='No watch exists with the UUID of {}'.format(uuid))
|
||||
@@ -74,8 +182,7 @@ class WatchSingleHistory(Resource):
|
||||
if timestamp == 'latest':
|
||||
timestamp = list(watch.history.keys())[-1]
|
||||
|
||||
with open(watch.history[timestamp], 'r') as f:
|
||||
content = f.read()
|
||||
content = watch.get_history_snapshot(timestamp)
|
||||
|
||||
response = make_response(content, 200)
|
||||
response.mimetype = "text/plain"
|
||||
@@ -89,36 +196,99 @@ class CreateWatch(Resource):
|
||||
self.update_q = kwargs['update_q']
|
||||
|
||||
@auth.check_token
|
||||
@expects_json(schema_create_watch)
|
||||
def post(self):
|
||||
# curl http://localhost:4000/api/v1/watch -H "Content-Type: application/json" -d '{"url": "https://my-nice.com", "tag": "one, two" }'
|
||||
json_data = request.get_json()
|
||||
tag = json_data['tag'].strip() if json_data.get('tag') else ''
|
||||
"""
|
||||
@api {post} /api/v1/watch Create a single watch
|
||||
@apiDescription Requires at least `url` set, can accept the same structure as <a href="#api-Watch-Watch">get single watch information</a> to create.
|
||||
@apiExample {curl} Example usage:
|
||||
curl http://localhost:4000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"url": "https://my-nice.com" , "tag": "nice list"}'
|
||||
@apiName Create
|
||||
@apiGroup Watch
|
||||
@apiSuccess (200) {String} OK Was created
|
||||
@apiSuccess (500) {String} ERR Some other error
|
||||
"""
|
||||
|
||||
if not validators.url(json_data['url'].strip()):
|
||||
json_data = request.get_json()
|
||||
url = json_data['url'].strip()
|
||||
|
||||
# If hosts that only contain alphanumerics are allowed ("localhost" for example)
|
||||
allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
|
||||
if not validators.url(url, simple_host=allow_simplehost):
|
||||
return "Invalid or unsupported URL", 400
|
||||
|
||||
extras = {'title': json_data['title'].strip()} if json_data.get('title') else {}
|
||||
if json_data.get('proxy'):
|
||||
plist = self.datastore.proxy_list
|
||||
if not json_data.get('proxy') in plist:
|
||||
return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400
|
||||
|
||||
new_uuid = self.datastore.add_watch(url=json_data['url'].strip(), tag=tag, extras=extras)
|
||||
self.update_q.put((1, new_uuid))
|
||||
return {'uuid': new_uuid}, 201
|
||||
extras = copy.deepcopy(json_data)
|
||||
|
||||
# Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)
|
||||
tags = None
|
||||
if extras.get('tag'):
|
||||
tags = extras.get('tag')
|
||||
del extras['tag']
|
||||
|
||||
del extras['url']
|
||||
|
||||
new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags)
|
||||
if new_uuid:
|
||||
self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
|
||||
return {'uuid': new_uuid}, 201
|
||||
else:
|
||||
return "Invalid or unsupported URL", 400
|
||||
|
||||
# Return concise list of available watches and some very basic info
|
||||
# curl http://localhost:4000/api/v1/watch|python -mjson.tool
|
||||
# ?recheck_all=1 to recheck all
|
||||
@auth.check_token
|
||||
def get(self):
|
||||
"""
|
||||
@api {get} /api/v1/watch List watches
|
||||
@apiDescription Return concise list of available watches and some very basic info
|
||||
@apiExample {curl} Example usage:
|
||||
curl http://localhost:4000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45"
|
||||
{
|
||||
"6a4b7d5c-fee4-4616-9f43-4ac97046b595": {
|
||||
"last_changed": 1677103794,
|
||||
"last_checked": 1677103794,
|
||||
"last_error": false,
|
||||
"title": "",
|
||||
"url": "http://www.quotationspage.com/random.php"
|
||||
},
|
||||
"e6f5fd5c-dbfe-468b-b8f3-f9d6ff5ad69b": {
|
||||
"last_changed": 0,
|
||||
"last_checked": 1676662819,
|
||||
"last_error": false,
|
||||
"title": "QuickLook",
|
||||
"url": "https://github.com/QL-Win/QuickLook/tags"
|
||||
}
|
||||
}
|
||||
|
||||
@apiParam {String} [recheck_all] Optional Set to =1 to force recheck of all watches
|
||||
@apiParam {String} [tag] Optional name of tag to limit results
|
||||
@apiName ListWatches
|
||||
@apiGroup Watch Management
|
||||
@apiSuccess (200) {String} OK JSON dict
|
||||
"""
|
||||
list = {}
|
||||
for k, v in self.datastore.data['watching'].items():
|
||||
list[k] = {'url': v['url'],
|
||||
'title': v['title'],
|
||||
'last_checked': v['last_checked'],
|
||||
'last_changed': v.last_changed,
|
||||
'last_error': v['last_error']}
|
||||
|
||||
tag_limit = request.args.get('tag', '').lower()
|
||||
|
||||
|
||||
for uuid, watch in self.datastore.data['watching'].items():
|
||||
# Watch tags by name (replace the other calls?)
|
||||
tags = self.datastore.get_all_tags_for_watch(uuid=uuid)
|
||||
if tag_limit and not any(v.get('title').lower() == tag_limit for k, v in tags.items()):
|
||||
continue
|
||||
|
||||
list[uuid] = {'url': watch['url'],
|
||||
'title': watch['title'],
|
||||
'last_checked': watch['last_checked'],
|
||||
'last_changed': watch.last_changed,
|
||||
'last_error': watch['last_error']}
|
||||
|
||||
if request.args.get('recheck_all'):
|
||||
for uuid in self.datastore.data['watching'].keys():
|
||||
self.update_q.put((1, uuid))
|
||||
self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
|
||||
return {'status': "OK"}, 200
|
||||
|
||||
return list, 200
|
||||
@@ -131,6 +301,22 @@ class SystemInfo(Resource):
|
||||
|
||||
@auth.check_token
|
||||
def get(self):
|
||||
"""
|
||||
@api {get} /api/v1/systeminfo Return system info
|
||||
@apiDescription Return some info about the current system state
|
||||
@apiExample {curl} Example usage:
|
||||
curl http://localhost:4000/api/v1/systeminfo -H"x-api-key:813031b16330fe25e3780cf0325daa45"
|
||||
HTTP/1.0 200
|
||||
{
|
||||
'queue_size': 10 ,
|
||||
'overdue_watches': ["watch-uuid-list"],
|
||||
'uptime': 38344.55,
|
||||
'watch_count': 800,
|
||||
'version': "0.40.1"
|
||||
}
|
||||
@apiName Get Info
|
||||
@apiGroup System Information
|
||||
"""
|
||||
import time
|
||||
overdue_watches = []
|
||||
|
||||
@@ -149,10 +335,11 @@ class SystemInfo(Resource):
|
||||
# Allow 5 minutes of grace time before we decide it's overdue
|
||||
if time_since_check - (5 * 60) > t:
|
||||
overdue_watches.append(uuid)
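# Worked example (assuming t is the configured number of seconds between checks for the watch):
# a watch checked hourly (t = 3600) is only reported overdue once time_since_check exceeds
# 3600 + 300 = 3900 seconds, i.e. after the 5-minute grace period on top of its normal interval.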
|
||||
|
||||
from changedetectionio import __version__ as main_version
|
||||
return {
|
||||
'queue_size': self.update_q.qsize(),
|
||||
'overdue_watches': overdue_watches,
|
||||
'uptime': round(time.time() - self.datastore.start_time, 2),
|
||||
'watch_count': len(self.datastore.data.get('watching', {}))
|
||||
'watch_count': len(self.datastore.data.get('watching', {})),
|
||||
'version': main_version
|
||||
}, 200
|
||||
|
||||
@@ -21,71 +21,141 @@
|
||||
# OR
|
||||
# - use multiprocessing to bump this over to its own process and add some transport layer (queue/pipes)
|
||||
|
||||
|
||||
|
||||
|
||||
from distutils.util import strtobool
|
||||
from flask import Blueprint, request, make_response
|
||||
from flask_login import login_required
|
||||
import os
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
from changedetectionio.store import ChangeDetectionStore
|
||||
from changedetectionio import login_optionally_required
|
||||
|
||||
browsersteps_live_ui_o = {}
|
||||
browsersteps_playwright_browser_interface = None
|
||||
browsersteps_playwright_browser_interface_start_time = None
|
||||
browsersteps_playwright_browser_interface_browser = None
|
||||
browsersteps_playwright_browser_interface_end_time = None
|
||||
browsersteps_sessions = {}
|
||||
io_interface_context = None
|
||||
|
||||
|
||||
def cleanup_playwright_session():
|
||||
print("Cleaning up old playwright session because time was up")
|
||||
global browsersteps_playwright_browser_interface
|
||||
global browsersteps_live_ui_o
|
||||
global browsersteps_playwright_browser_interface_browser
|
||||
global browsersteps_playwright_browser_interface
|
||||
global browsersteps_playwright_browser_interface_start_time
|
||||
global browsersteps_playwright_browser_interface_end_time
|
||||
|
||||
import psutil
|
||||
|
||||
current_process = psutil.Process()
|
||||
children = current_process.children(recursive=True)
|
||||
for child in children:
|
||||
print (child)
|
||||
print('Child pid is {}'.format(child.pid))
|
||||
|
||||
# .stop() hangs sometimes if its called when there are no children to process
|
||||
# but how do we know this is our child? dunno
|
||||
if children:
|
||||
browsersteps_playwright_browser_interface.stop()
|
||||
|
||||
browsersteps_live_ui_o = {}
|
||||
browsersteps_playwright_browser_interface = None
|
||||
browsersteps_playwright_browser_interface_start_time = None
|
||||
browsersteps_playwright_browser_interface_browser = None
|
||||
browsersteps_playwright_browser_interface_end_time = None
|
||||
print ("Cleaning up old playwright session because time was up - done")
|
||||
|
||||
def construct_blueprint(datastore: ChangeDetectionStore):
|
||||
|
||||
browser_steps_blueprint = Blueprint('browser_steps', __name__, template_folder="templates")
|
||||
|
||||
@login_required
|
||||
@browser_steps_blueprint.route("/browsersteps_update", methods=['GET', 'POST'])
|
||||
def start_browsersteps_session(watch_uuid):
|
||||
from . import nonContext
|
||||
from . import browser_steps
|
||||
import time
|
||||
global browsersteps_sessions
|
||||
global io_interface_context
|
||||
|
||||
|
||||
# We keep the playwright session open for many minutes
|
||||
keepalive_seconds = int(os.getenv('BROWSERSTEPS_MINUTES_KEEPALIVE', 10)) * 60
|
||||
|
||||
browsersteps_start_session = {'start_time': time.time()}
|
||||
|
||||
# You can only have one of these running
|
||||
# This should be very fine to leave running for the life of the application
|
||||
# @idea - Make it global so the pool of watch fetchers can use it also
|
||||
if not io_interface_context:
|
||||
io_interface_context = nonContext.c_sync_playwright()
|
||||
# Start the Playwright context, which is actually a nodejs sub-process and communicates over STDIN/STDOUT pipes
|
||||
io_interface_context = io_interface_context.start()
|
||||
|
||||
keepalive_ms = ((keepalive_seconds + 3) * 1000)
|
||||
base_url = os.getenv('PLAYWRIGHT_DRIVER_URL', '')
|
||||
a = "?" if not '?' in base_url else '&'
|
||||
base_url += a + f"timeout={keepalive_ms}"
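# Worked example (illustrative values): with the default BROWSERSTEPS_MINUTES_KEEPALIVE of 10,
# keepalive_seconds is 600 and keepalive_ms is (600 + 3) * 1000 = 603000, so a driver URL such as
# ws://playwright-chrome:3000 (hypothetical) becomes ws://playwright-chrome:3000?timeout=603000,
# or has &timeout=603000 appended when the URL already carries a query string.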
|
||||
|
||||
try:
|
||||
browsersteps_start_session['browser'] = io_interface_context.chromium.connect_over_cdp(base_url)
|
||||
except Exception as e:
|
||||
if 'ECONNREFUSED' in str(e):
|
||||
return make_response('Unable to start the Playwright Browser session, is it running?', 401)
|
||||
else:
|
||||
# Other errors, bad URL syntax, bad reply etc
|
||||
return make_response(str(e), 401)
|
||||
|
||||
proxy_id = datastore.get_preferred_proxy_for_watch(uuid=watch_uuid)
|
||||
proxy = None
|
||||
if proxy_id:
|
||||
proxy_url = datastore.proxy_list.get(proxy_id).get('url')
|
||||
if proxy_url:
|
||||
|
||||
# Playwright needs separate username and password values
|
||||
from urllib.parse import urlparse
|
||||
parsed = urlparse(proxy_url)
|
||||
proxy = {'server': proxy_url}
|
||||
|
||||
if parsed.username:
|
||||
proxy['username'] = parsed.username
|
||||
|
||||
if parsed.password:
|
||||
proxy['password'] = parsed.password
|
||||
|
||||
print("Browser Steps: UUID {} selected proxy {}".format(watch_uuid, proxy_url))
|
||||
|
||||
# Tell Playwright to connect to Chrome and setup a new session via our stepper interface
|
||||
browsersteps_start_session['browserstepper'] = browser_steps.browsersteps_live_ui(
|
||||
playwright_browser=browsersteps_start_session['browser'],
|
||||
proxy=proxy)
|
||||
|
||||
# For test
|
||||
#browsersteps_start_session['browserstepper'].action_goto_url(value="http://example.com?time="+str(time.time()))
|
||||
|
||||
return browsersteps_start_session
|
||||
|
||||
|
||||
@login_optionally_required
|
||||
@browser_steps_blueprint.route("/browsersteps_start_session", methods=['GET'])
|
||||
def browsersteps_start_session():
|
||||
# A new session was requested, return sessionID
|
||||
|
||||
import uuid
|
||||
global browsersteps_sessions
|
||||
|
||||
browsersteps_session_id = str(uuid.uuid4())
|
||||
watch_uuid = request.args.get('uuid')
|
||||
|
||||
if not watch_uuid:
|
||||
return make_response('No Watch UUID specified', 500)
|
||||
|
||||
print("Starting connection with playwright")
|
||||
logging.debug("browser_steps.py connecting")
|
||||
browsersteps_sessions[browsersteps_session_id] = start_browsersteps_session(watch_uuid)
|
||||
print("Starting connection with playwright - done")
|
||||
return {'browsersteps_session_id': browsersteps_session_id}
|
||||
|
||||
@login_optionally_required
|
||||
@browser_steps_blueprint.route("/browsersteps_image", methods=['GET'])
|
||||
def browser_steps_fetch_screenshot_image():
|
||||
from flask import (
|
||||
make_response,
|
||||
request,
|
||||
send_from_directory,
|
||||
)
|
||||
uuid = request.args.get('uuid')
|
||||
step_n = int(request.args.get('step_n'))
|
||||
|
||||
watch = datastore.data['watching'].get(uuid)
|
||||
filename = f"step_before-{step_n}.jpeg" if request.args.get('type', '') == 'before' else f"step_{step_n}.jpeg"
|
||||
|
||||
if step_n and watch and os.path.isfile(os.path.join(watch.watch_data_dir, filename)):
|
||||
response = make_response(send_from_directory(directory=watch.watch_data_dir, path=filename))
|
||||
response.headers['Content-type'] = 'image/jpeg'
|
||||
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
|
||||
response.headers['Pragma'] = 'no-cache'
|
||||
response.headers['Expires'] = 0
|
||||
return response
|
||||
|
||||
else:
|
||||
return make_response('Unable to fetch image, is the URL correct? does the watch exist? does the step_type-n.jpeg exist?', 401)
|
||||
|
||||
# A request for an action was received
|
||||
@login_optionally_required
|
||||
@browser_steps_blueprint.route("/browsersteps_update", methods=['POST'])
|
||||
def browsersteps_ui_update():
|
||||
import base64
|
||||
import playwright._impl._api_types
|
||||
import time
|
||||
|
||||
global browsersteps_sessions
|
||||
from changedetectionio.blueprint.browser_steps import browser_steps
|
||||
|
||||
global browsersteps_live_ui_o, browsersteps_playwright_browser_interface_end_time
|
||||
global browsersteps_playwright_browser_interface_browser
|
||||
global browsersteps_playwright_browser_interface
|
||||
global browsersteps_playwright_browser_interface_start_time
|
||||
|
||||
step_n = None
|
||||
remaining =0
|
||||
uuid = request.args.get('uuid')
|
||||
|
||||
@@ -94,16 +164,8 @@ def construct_blueprint(datastore: ChangeDetectionStore):
|
||||
if not browsersteps_session_id:
|
||||
return make_response('No browsersteps_session_id specified', 500)
|
||||
|
||||
# Because we don't "really" run in a context manager ( we make the playwright interface global/long-living )
|
||||
# We need to manage the shutdown when the time is up
|
||||
if browsersteps_playwright_browser_interface_end_time:
|
||||
remaining = browsersteps_playwright_browser_interface_end_time-time.time()
|
||||
if browsersteps_playwright_browser_interface_end_time and remaining <= 0:
|
||||
|
||||
|
||||
cleanup_playwright_session()
|
||||
|
||||
return make_response('Browser session expired, please reload the Browser Steps interface', 500)
|
||||
if not browsersteps_sessions.get(browsersteps_session_id):
|
||||
return make_response('No session exists under that ID', 500)
|
||||
|
||||
|
||||
# Actions - step/apply/etc, do the thing and return state
|
||||
@@ -117,109 +179,63 @@ def construct_blueprint(datastore: ChangeDetectionStore):
|
||||
|
||||
if step_operation == 'Goto site':
|
||||
step_operation = 'goto_url'
|
||||
step_optional_value = None
|
||||
step_selector = datastore.data['watching'][uuid].get('url')
|
||||
step_optional_value = datastore.data['watching'][uuid].get('url')
|
||||
step_selector = None
|
||||
|
||||
# @todo try.. accept.. nice errors not popups..
|
||||
try:
|
||||
|
||||
this_session = browsersteps_live_ui_o.get(browsersteps_session_id)
|
||||
if not this_session:
|
||||
print("Browser exited")
|
||||
return make_response('Browser session ran out of time :( Please reload this page.', 401)
|
||||
|
||||
this_session.call_action(action_name=step_operation,
|
||||
browsersteps_sessions[browsersteps_session_id]['browserstepper'].call_action(action_name=step_operation,
|
||||
selector=step_selector,
|
||||
optional_value=step_optional_value)
|
||||
except playwright._impl._api_types.TimeoutError as e:
|
||||
print("Element wasnt found :-(", step_operation)
|
||||
return make_response("Element was not found on page", 401)
|
||||
|
||||
except playwright._impl._api_types.Error as e:
|
||||
# Browser/playwright level error
|
||||
print("Browser error - got playwright._impl._api_types.Error, try reloading the session/browser")
|
||||
print (str(e))
|
||||
|
||||
except Exception as e:
|
||||
print("Exception when calling step operation", step_operation, str(e))
|
||||
# Try to find something of value to give back to the user
|
||||
for l in str(e).splitlines():
|
||||
if 'DOMException' in l:
|
||||
return make_response(l, 401)
|
||||
|
||||
return make_response('Browser session ran out of time :( Please reload this page.', 401)
|
||||
return make_response(str(e).splitlines()[0], 401)
|
||||
|
||||
# Get visual selector ready/update its data (also use the current filter info from the page?)
|
||||
# When the last 'apply' button was pressed
|
||||
# @todo this adds overhead because the xpath selection is happening twice
|
||||
u = this_session.page.url
|
||||
u = browsersteps_sessions[browsersteps_session_id]['browserstepper'].page.url
|
||||
if is_last_step and u:
|
||||
(screenshot, xpath_data) = this_session.request_visualselector_data()
|
||||
(screenshot, xpath_data) = browsersteps_sessions[browsersteps_session_id]['browserstepper'].request_visualselector_data()
|
||||
datastore.save_screenshot(watch_uuid=uuid, screenshot=screenshot)
|
||||
datastore.save_xpath_data(watch_uuid=uuid, data=xpath_data)
|
||||
|
||||
# Setup interface
|
||||
if request.method == 'GET':
|
||||
|
||||
if not browsersteps_playwright_browser_interface:
|
||||
print("Starting connection with playwright")
|
||||
logging.debug("browser_steps.py connecting")
|
||||
from playwright.sync_api import sync_playwright
|
||||
|
||||
browsersteps_playwright_browser_interface = sync_playwright().start()
|
||||
|
||||
|
||||
time.sleep(1)
|
||||
# At 20 minutes, some other variable is closing it
|
||||
# @todo find out what it is and set it
|
||||
seconds_keepalive = int(os.getenv('BROWSERSTEPS_MINUTES_KEEPALIVE', 10)) * 60
|
||||
|
||||
# keep it alive for 10 seconds more than we advertise, sometimes it helps to keep it shutting down cleanly
|
||||
keepalive = "&timeout={}".format(((seconds_keepalive+3) * 1000))
|
||||
try:
|
||||
browsersteps_playwright_browser_interface_browser = browsersteps_playwright_browser_interface.chromium.connect_over_cdp(
|
||||
os.getenv('PLAYWRIGHT_DRIVER_URL', '') + keepalive)
|
||||
except Exception as e:
|
||||
if 'ECONNREFUSED' in str(e):
|
||||
return make_response('Unable to start the Playwright session properly, is it running?', 401)
|
||||
|
||||
browsersteps_playwright_browser_interface_end_time = time.time() + (seconds_keepalive-3)
|
||||
print("Starting connection with playwright - done")
|
||||
|
||||
if not browsersteps_live_ui_o.get(browsersteps_session_id):
|
||||
# Boot up a new session
|
||||
proxy_id = datastore.get_preferred_proxy_for_watch(uuid=uuid)
|
||||
proxy = None
|
||||
if proxy_id:
|
||||
proxy_url = datastore.proxy_list.get(proxy_id).get('url')
|
||||
if proxy_url:
|
||||
proxy = {'server': proxy_url}
|
||||
print("Browser Steps: UUID {} Using proxy {}".format(uuid, proxy_url))
|
||||
|
||||
# Begin the new "Playwright Context" that re-uses the playwright interface
|
||||
# Each session is a "Playwright Context" as a list, that uses the playwright interface
|
||||
browsersteps_live_ui_o[browsersteps_session_id] = browser_steps.browsersteps_live_ui(
|
||||
playwright_browser=browsersteps_playwright_browser_interface_browser,
|
||||
proxy=proxy)
|
||||
this_session = browsersteps_live_ui_o[browsersteps_session_id]
|
||||
|
||||
if not this_session.page:
|
||||
cleanup_playwright_session()
|
||||
return make_response('Browser session ran out of time :( Please reload this page.', 401)
|
||||
# if not this_session.page:
|
||||
# cleanup_playwright_session()
|
||||
# return make_response('Browser session ran out of time :( Please reload this page.', 401)
|
||||
|
||||
# Screenshots and other info only needed on requesting a step (POST)
|
||||
try:
|
||||
state = this_session.get_current_state()
|
||||
state = browsersteps_sessions[browsersteps_session_id]['browserstepper'].get_current_state()
|
||||
except playwright._impl._api_types.Error as e:
|
||||
return make_response("Browser session ran out of time :( Please reload this page."+str(e), 401)
|
||||
|
||||
p = {'screenshot': "data:image/png;base64,{}".format(
|
||||
# Use send_file() which is way faster than read/write loop on bytes
|
||||
import json
|
||||
from tempfile import mkstemp
|
||||
from flask import send_file
|
||||
tmp_fd, tmp_file = mkstemp(text=True, suffix=".json", prefix="changedetectionio-")
|
||||
|
||||
output = json.dumps({'screenshot': "data:image/jpeg;base64,{}".format(
|
||||
base64.b64encode(state[0]).decode('ascii')),
|
||||
'xpath_data': state[1],
|
||||
'session_age_start': this_session.age_start,
|
||||
'session_age_start': browsersteps_sessions[browsersteps_session_id]['browserstepper'].age_start,
|
||||
'browser_time_remaining': round(remaining)
|
||||
}
|
||||
})
|
||||
|
||||
with os.fdopen(tmp_fd, 'w') as f:
|
||||
f.write(output)
|
||||
|
||||
# @todo BSON/binary JSON, faster xfer, OR pick it off the disk
|
||||
return p
|
||||
response = make_response(send_file(path_or_file=tmp_file,
|
||||
mimetype='application/json; charset=UTF-8',
|
||||
etag=True))
|
||||
# No longer needed
|
||||
os.unlink(tmp_file)
|
||||
|
||||
return response
|
||||
|
||||
return browser_steps_blueprint
|
||||
|
||||
|
||||
@@ -22,14 +22,17 @@ browser_step_ui_config = {'Choose one': '0 0',
|
||||
'Click element': '1 0',
|
||||
'Click element containing text': '0 1',
|
||||
'Enter text in field': '1 1',
|
||||
'Execute JS': '0 1',
|
||||
# 'Extract text and use as filter': '1 0',
|
||||
'Goto site': '0 0',
|
||||
'Goto URL': '0 1',
|
||||
'Press Enter': '0 0',
|
||||
'Select by label': '1 1',
|
||||
'Scroll down': '0 0',
|
||||
'Uncheck checkbox': '1 0',
|
||||
'Wait for seconds': '0 1',
|
||||
'Wait for text': '0 1',
|
||||
'Wait for text in element': '1 1',
|
||||
# 'Press Page Down': '0 0',
|
||||
# 'Press Page Up': '0 0',
|
||||
# weird bug, come back to it later
|
||||
@@ -52,7 +55,7 @@ class steppable_browser_interface():
|
||||
|
||||
print("> action calling", call_action_name)
|
||||
# https://playwright.dev/python/docs/selectors#xpath-selectors
|
||||
if selector.startswith('/') and not selector.startswith('//'):
|
||||
if selector and selector.startswith('/') and not selector.startswith('//'):
|
||||
selector = "xpath=" + selector
|
||||
|
||||
action_handler = getattr(self, "action_" + call_action_name)
|
||||
@@ -68,28 +71,26 @@ class steppable_browser_interface():
|
||||
optional_value = str(jinja2_env.from_string(optional_value).render())
|
||||
|
||||
action_handler(selector, optional_value)
|
||||
self.page.wait_for_timeout(3 * 1000)
|
||||
self.page.wait_for_timeout(1.5 * 1000)
|
||||
print("Call action done in", time.time() - now)
|
||||
|
||||
def action_goto_url(self, url, optional_value):
|
||||
def action_goto_url(self, selector=None, value=None):
|
||||
# self.page.set_viewport_size({"width": 1280, "height": 5000})
|
||||
now = time.time()
|
||||
response = self.page.goto(url, timeout=0, wait_until='domcontentloaded')
|
||||
print("Time to goto URL", time.time() - now)
|
||||
|
||||
# Wait_until = commit
|
||||
# - `'commit'` - consider operation to be finished when network response is received and the document started loading.
|
||||
# Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
|
||||
# This seemed to solve nearly all 'TimeoutErrors'
|
||||
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5))
|
||||
self.page.wait_for_timeout(extra_wait * 1000)
|
||||
response = self.page.goto(value, timeout=0, wait_until='load')
|
||||
# Should be the same as the puppeteer_fetch.js methods, means, load with no timeout set (skip timeout)
|
||||
#and also wait for seconds ?
|
||||
#await page.waitForTimeout(1000);
|
||||
#await page.waitForTimeout(extra_wait_ms);
|
||||
print("Time to goto URL ", time.time() - now)
|
||||
return response
|
||||
|
||||
def action_click_element_containing_text(self, selector=None, value=''):
|
||||
if not len(value.strip()):
|
||||
return
|
||||
elem = self.page.get_by_text(value)
|
||||
if elem.count():
|
||||
elem.first.click(delay=randint(200, 500))
|
||||
elem.first.click(delay=randint(200, 500), timeout=3000)
|
||||
|
||||
def action_enter_text_in_field(self, selector, value):
|
||||
if not len(selector.strip()):
|
||||
@@ -97,11 +98,16 @@ class steppable_browser_interface():
|
||||
|
||||
self.page.fill(selector, value, timeout=10 * 1000)
|
||||
|
||||
def action_execute_js(self, selector, value):
|
||||
response = self.page.evaluate(value)
|
||||
return response
|
||||
|
||||
def action_click_element(self, selector, value):
|
||||
print("Clicking element")
|
||||
if not len(selector.strip()):
|
||||
return
|
||||
self.page.click(selector, timeout=10 * 1000, delay=randint(200, 500))
|
||||
|
||||
self.page.click(selector=selector, timeout=30 * 1000, delay=randint(200, 500))
|
||||
|
||||
def action_click_element_if_exists(self, selector, value):
|
||||
import playwright._impl._api_types as _api_types
|
||||
@@ -128,7 +134,18 @@ class steppable_browser_interface():
|
||||
self.page.wait_for_timeout(1000)
|
||||
|
||||
def action_wait_for_seconds(self, selector, value):
|
||||
self.page.wait_for_timeout(int(value) * 1000)
|
||||
self.page.wait_for_timeout(float(value.strip()) * 1000)
|
||||
|
||||
def action_wait_for_text(self, selector, value):
|
||||
import json
|
||||
v = json.dumps(value)
|
||||
self.page.wait_for_function(f'document.querySelector("body").innerText.includes({v});', timeout=30000)
|
||||
|
||||
def action_wait_for_text_in_element(self, selector, value):
|
||||
import json
|
||||
s = json.dumps(selector)
|
||||
v = json.dumps(value)
|
||||
self.page.wait_for_function(f'document.querySelector({s}).innerText.includes({v});', timeout=30000)
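# Worked example (illustrative values): with selector '#price' and value 'Out of stock', the
# json.dumps() calls above quote the values safely and the page-side expression becomes
#   document.querySelector("#price").innerText.includes("Out of stock");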
|
||||
|
||||
# @todo - in the future make some popout interface to capture what needs to be set
|
||||
# https://playwright.dev/python/docs/api/class-keyboard
|
||||
@@ -142,10 +159,10 @@ class steppable_browser_interface():
|
||||
self.page.keyboard.press("PageDown", delay=randint(200, 500))
|
||||
|
||||
def action_check_checkbox(self, selector, value):
|
||||
self.page.locator(selector).check()
|
||||
self.page.locator(selector).check(timeout=1000)
|
||||
|
||||
def action_uncheck_checkbox(self, selector, value):
|
||||
self.page.locator(selector).uncheck()
|
||||
self.page.locator(selector, timeout=1000).uncheck(timeout=1000)
|
||||
|
||||
|
||||
# Responsible for maintaining a live 'context' with browserless
|
||||
@@ -207,7 +224,7 @@ class browsersteps_live_ui(steppable_browser_interface):
|
||||
# Listen for all console events and handle errors
|
||||
self.page.on("console", lambda msg: print(f"Browser steps console - {msg.type}: {msg.text} {msg.args}"))
|
||||
|
||||
print("time to browser setup", time.time() - now)
|
||||
print("Time to browser setup", time.time() - now)
|
||||
self.page.wait_for_timeout(1 * 1000)
|
||||
|
||||
def mark_as_closed(self):
|
||||
@@ -232,7 +249,7 @@ class browsersteps_live_ui(steppable_browser_interface):
|
||||
self.page.evaluate("var include_filters=''")
|
||||
# Go find the interactive elements
|
||||
# @todo in the future, something smarter that can scan for elements with .click/focus etc event handlers?
|
||||
elements = 'a,button,input,select,textarea,i,th,td,p,li,h1,h2,h3,h4'
|
||||
elements = 'a,button,input,select,textarea,i,th,td,p,li,h1,h2,h3,h4,div,span'
|
||||
xpath_element_js = xpath_element_js.replace('%ELEMENTS%', elements)
|
||||
xpath_data = self.page.evaluate("async () => {" + xpath_element_js + "}")
|
||||
# So the JS will find the smallest one first
|
||||
|
||||
changedetectionio/blueprint/browser_steps/nonContext.py (new file, 18 lines)
@@ -0,0 +1,18 @@
from playwright.sync_api import PlaywrightContextManager
import asyncio

# So playwright wants to run as a context manager, but we do something horrible and hacky
# we are holding the session open for as long as possible, then shutting it down, and opening a new one
# So it means we don't get to use PlaywrightContextManager' __enter__ __exit__
# To work around this, make goodbye() act the same as the __exit__()
#
# But actually I think this is because the context is opened correctly with __enter__() but we timeout the connection
# then theres some lock condition where we cant destroy it without it hanging

class c_PlaywrightContextManager(PlaywrightContextManager):

    def goodbye(self) -> None:
        self.__exit__()

def c_sync_playwright() -> PlaywrightContextManager:
    return c_PlaywrightContextManager()
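For orientation, the browser-steps blueprint earlier in this diff drives this wrapper roughly as follows; a sketch only, with goodbye() shown as the intended teardown and a hypothetical driver URL:

    from changedetectionio.blueprint.browser_steps import nonContext

    manager = nonContext.c_sync_playwright()    # subclassed PlaywrightContextManager
    playwright = manager.start()                # long-lived handle, no `with` block needed
    browser = playwright.chromium.connect_over_cdp("ws://playwright-chrome:3000?timeout=603000")
    # ... drive browser steps against `browser` ...
    manager.goodbye()                           # behaves like __exit__(), closing the session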
changedetectionio/blueprint/check_proxies/__init__.py (new file, 118 lines)
@@ -0,0 +1,118 @@
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
from functools import wraps
|
||||
|
||||
from flask import Blueprint
|
||||
from flask_login import login_required
|
||||
|
||||
from changedetectionio.processors import text_json_diff
|
||||
from changedetectionio.store import ChangeDetectionStore
|
||||
|
||||
|
||||
STATUS_CHECKING = 0
|
||||
STATUS_FAILED = 1
|
||||
STATUS_OK = 2
|
||||
THREADPOOL_MAX_WORKERS = 3
|
||||
_DEFAULT_POOL = ThreadPoolExecutor(max_workers=THREADPOOL_MAX_WORKERS)
|
||||
|
||||
|
||||
# Maybe use fetch-time if its >5 to show some expected load time?
|
||||
def threadpool(f, executor=None):
|
||||
@wraps(f)
|
||||
def wrap(*args, **kwargs):
|
||||
return (executor or _DEFAULT_POOL).submit(f, *args, **kwargs)
|
||||
|
||||
return wrap
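# Usage sketch (illustrative arguments, mirroring _recalc_check_status() further down): a call to a
# @threadpool-decorated function returns a concurrent.futures.Future immediately, which is polled later.
#
#   future = long_task(uuid='some-watch-uuid', preferred_proxy='proxy-key-from-proxy_list')
#   try:
#       status = future.result(timeout=0.05)   # raises if the check has not finished yet
#   except Exception:
#       status = {'status': 'RUNNING'}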
|
||||
|
||||
|
||||
def construct_blueprint(datastore: ChangeDetectionStore):
|
||||
check_proxies_blueprint = Blueprint('check_proxies', __name__)
|
||||
checks_in_progress = {}
|
||||
|
||||
@threadpool
|
||||
def long_task(uuid, preferred_proxy):
|
||||
import time
|
||||
from changedetectionio import content_fetcher
|
||||
|
||||
status = {'status': '', 'length': 0, 'text': ''}
|
||||
from jinja2 import Environment, BaseLoader
|
||||
|
||||
contents = ''
|
||||
now = time.time()
|
||||
try:
|
||||
update_handler = text_json_diff.perform_site_check(datastore=datastore)
|
||||
changed_detected, update_obj, contents = update_handler.run(uuid, preferred_proxy=preferred_proxy, skip_when_checksum_same=False)
|
||||
# title, size is len contents not len xfer
|
||||
except content_fetcher.Non200ErrorCodeReceived as e:
|
||||
if e.status_code == 404:
|
||||
status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but 404 (page not found)"})
|
||||
elif e.status_code == 403 or e.status_code == 401:
|
||||
status.update({'status': 'ERROR', 'length': len(contents), 'text': f"{e.status_code} - Access denied"})
|
||||
else:
|
||||
status.update({'status': 'ERROR', 'length': len(contents), 'text': f"Status code: {e.status_code}"})
|
||||
except text_json_diff.FilterNotFoundInResponse:
|
||||
status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but CSS/xPath filter not found (page changed layout?)"})
|
||||
except content_fetcher.EmptyReply as e:
|
||||
if e.status_code == 403 or e.status_code == 401:
|
||||
status.update({'status': 'ERROR OTHER', 'length': len(contents), 'text': f"Got empty reply with code {e.status_code} - Access denied"})
|
||||
else:
|
||||
status.update({'status': 'ERROR OTHER', 'length': len(contents) if contents else 0, 'text': f"Empty reply with code {e.status_code}, needs chrome?"})
|
||||
except content_fetcher.ReplyWithContentButNoText as e:
|
||||
txt = f"Got reply but with no content - Status code {e.status_code} - It's possible that the filters were found, but contained no usable text (or contained only an image)."
|
||||
status.update({'status': 'ERROR', 'text': txt})
|
||||
except Exception as e:
|
||||
status.update({'status': 'ERROR OTHER', 'length': len(contents) if contents else 0, 'text': 'Error: '+type(e).__name__+str(e)})
|
||||
else:
|
||||
status.update({'status': 'OK', 'length': len(contents), 'text': ''})
|
||||
|
||||
if status.get('text'):
|
||||
status['text'] = Environment(loader=BaseLoader()).from_string('{{text|e}}').render({'text': status['text']})
|
||||
|
||||
status['time'] = "{:.2f}s".format(time.time() - now)
|
||||
|
||||
return status
|
||||
|
||||
def _recalc_check_status(uuid):
|
||||
|
||||
results = {}
|
||||
for k, v in checks_in_progress.get(uuid, {}).items():
|
||||
try:
|
||||
r_1 = v.result(timeout=0.05)
|
||||
except Exception as e:
|
||||
# If timeout error?
|
||||
results[k] = {'status': 'RUNNING'}
|
||||
|
||||
else:
|
||||
results[k] = r_1
|
||||
|
||||
return results
|
||||
|
||||
@login_required
|
||||
@check_proxies_blueprint.route("/<string:uuid>/status", methods=['GET'])
|
||||
def get_recheck_status(uuid):
|
||||
results = _recalc_check_status(uuid=uuid)
|
||||
return results
|
||||
|
||||
@login_required
|
||||
@check_proxies_blueprint.route("/<string:uuid>/start", methods=['GET'])
|
||||
def start_check(uuid):
|
||||
|
||||
if not datastore.proxy_list:
|
||||
return
|
||||
|
||||
if checks_in_progress.get(uuid):
|
||||
state = _recalc_check_status(uuid=uuid)
|
||||
for proxy_key, v in state.items():
|
||||
if v.get('status') == 'RUNNING':
|
||||
return state
|
||||
else:
|
||||
checks_in_progress[uuid] = {}
|
||||
|
||||
for k, v in datastore.proxy_list.items():
|
||||
if not checks_in_progress[uuid].get(k):
|
||||
checks_in_progress[uuid][k] = long_task(uuid=uuid, preferred_proxy=k)
|
||||
|
||||
results = _recalc_check_status(uuid=uuid)
|
||||
return results
|
||||
|
||||
return check_proxies_blueprint
|
||||
changedetectionio/blueprint/price_data_follower/__init__.py (new file, 33 lines)
@@ -0,0 +1,33 @@

from distutils.util import strtobool
from flask import Blueprint, flash, redirect, url_for
from flask_login import login_required
from changedetectionio.store import ChangeDetectionStore
from changedetectionio import queuedWatchMetaData
from queue import PriorityQueue

PRICE_DATA_TRACK_ACCEPT = 'accepted'
PRICE_DATA_TRACK_REJECT = 'rejected'

def construct_blueprint(datastore: ChangeDetectionStore, update_q: PriorityQueue):

    price_data_follower_blueprint = Blueprint('price_data_follower', __name__)

    @login_required
    @price_data_follower_blueprint.route("/<string:uuid>/accept", methods=['GET'])
    def accept(uuid):
        datastore.data['watching'][uuid]['track_ldjson_price_data'] = PRICE_DATA_TRACK_ACCEPT
        update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
        return redirect(url_for("form_watch_checknow", uuid=uuid))


    @login_required
    @price_data_follower_blueprint.route("/<string:uuid>/reject", methods=['GET'])
    def reject(uuid):
        datastore.data['watching'][uuid]['track_ldjson_price_data'] = PRICE_DATA_TRACK_REJECT
        return redirect(url_for("index"))


    return price_data_follower_blueprint
changedetectionio/blueprint/tags/README.md (new file, 9 lines)
@@ -0,0 +1,9 @@
# Groups tags

## How it works

Watch has a list() of tag UUID's, which relate to a config under application.settings.tags

The 'tag' is actually a watch, because they basically will eventually share 90% of the same config.

So a tag is like an abstract of a watch
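Roughly, the data shape implied by the blueprint code in this changeset is sketched below (UUIDs and field values are illustrative, not taken from a real datastore):

    # application settings hold the tag configs, keyed by tag UUID
    datastore.data['settings']['application']['tags'] = {
        'uuid-of-tag': {'title': 'nice list', 'notification_muted': False},
    }
    # each watch lists the tag UUIDs it belongs to
    datastore.data['watching']['uuid-of-watch']['tags'] = ['uuid-of-tag']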
changedetectionio/blueprint/tags/__init__.py (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
from flask import Blueprint, request, make_response, render_template, flash, url_for, redirect
|
||||
from changedetectionio.store import ChangeDetectionStore
|
||||
from changedetectionio import login_optionally_required
|
||||
|
||||
|
||||
def construct_blueprint(datastore: ChangeDetectionStore):
|
||||
tags_blueprint = Blueprint('tags', __name__, template_folder="templates")
|
||||
|
||||
@tags_blueprint.route("/list", methods=['GET'])
|
||||
@login_optionally_required
|
||||
def tags_overview_page():
|
||||
from .form import SingleTag
|
||||
add_form = SingleTag(request.form)
|
||||
output = render_template("groups-overview.html",
|
||||
form=add_form,
|
||||
available_tags=datastore.data['settings']['application'].get('tags', {}),
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
@tags_blueprint.route("/add", methods=['POST'])
|
||||
@login_optionally_required
|
||||
def form_tag_add():
|
||||
from .form import SingleTag
|
||||
add_form = SingleTag(request.form)
|
||||
|
||||
if not add_form.validate():
|
||||
for widget, l in add_form.errors.items():
|
||||
flash(','.join(l), 'error')
|
||||
return redirect(url_for('tags.tags_overview_page'))
|
||||
|
||||
title = request.form.get('name').strip()
|
||||
|
||||
if datastore.tag_exists_by_name(title):
|
||||
flash(f'The tag "{title}" already exists', "error")
|
||||
return redirect(url_for('tags.tags_overview_page'))
|
||||
|
||||
datastore.add_tag(title)
|
||||
flash("Tag added")
|
||||
|
||||
|
||||
return redirect(url_for('tags.tags_overview_page'))
|
||||
|
||||
@tags_blueprint.route("/mute/<string:uuid>", methods=['GET'])
|
||||
@login_optionally_required
|
||||
def mute(uuid):
|
||||
if datastore.data['settings']['application']['tags'].get(uuid):
|
||||
datastore.data['settings']['application']['tags'][uuid]['notification_muted'] = not datastore.data['settings']['application']['tags'][uuid]['notification_muted']
|
||||
return redirect(url_for('tags.tags_overview_page'))
|
||||
|
||||
@tags_blueprint.route("/delete/<string:uuid>", methods=['GET'])
|
||||
@login_optionally_required
|
||||
def delete(uuid):
|
||||
removed = 0
|
||||
# Delete the tag, and any tag reference
|
||||
if datastore.data['settings']['application']['tags'].get(uuid):
|
||||
del datastore.data['settings']['application']['tags'][uuid]
|
||||
|
||||
for watch_uuid, watch in datastore.data['watching'].items():
|
||||
if watch.get('tags') and uuid in watch['tags']:
|
||||
removed += 1
|
||||
watch['tags'].remove(uuid)
|
||||
|
||||
flash(f"Tag deleted and removed from {removed} watches")
|
||||
return redirect(url_for('tags.tags_overview_page'))
|
||||
|
||||
@tags_blueprint.route("/unlink/<string:uuid>", methods=['GET'])
|
||||
@login_optionally_required
|
||||
def unlink(uuid):
|
||||
unlinked = 0
|
||||
for watch_uuid, watch in datastore.data['watching'].items():
|
||||
if watch.get('tags') and uuid in watch['tags']:
|
||||
unlinked += 1
|
||||
watch['tags'].remove(uuid)
|
||||
|
||||
flash(f"Tag unlinked removed from {unlinked} watches")
|
||||
return redirect(url_for('tags.tags_overview_page'))
|
||||
|
||||
@tags_blueprint.route("/delete_all", methods=['GET'])
|
||||
@login_optionally_required
|
||||
def delete_all():
|
||||
for watch_uuid, watch in datastore.data['watching'].items():
|
||||
watch['tags'] = []
|
||||
datastore.data['settings']['application']['tags'] = {}
|
||||
|
||||
flash(f"All tags deleted")
|
||||
return redirect(url_for('tags.tags_overview_page'))
|
||||
|
||||
@tags_blueprint.route("/edit/<string:uuid>", methods=['GET'])
|
||||
@login_optionally_required
|
||||
def form_tag_edit(uuid):
|
||||
from changedetectionio import forms
|
||||
|
||||
if uuid == 'first':
|
||||
uuid = list(datastore.data['settings']['application']['tags'].keys()).pop()
|
||||
|
||||
default = datastore.data['settings']['application']['tags'].get(uuid)
|
||||
|
||||
form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
|
||||
data=default,
|
||||
)
|
||||
form.datastore=datastore # needed?
|
||||
|
||||
output = render_template("edit-tag.html",
|
||||
data=default,
|
||||
form=form,
|
||||
settings_application=datastore.data['settings']['application'],
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
|
||||
@tags_blueprint.route("/edit/<string:uuid>", methods=['POST'])
|
||||
@login_optionally_required
|
||||
def form_tag_edit_submit(uuid):
|
||||
from changedetectionio import forms
|
||||
if uuid == 'first':
|
||||
uuid = list(datastore.data['settings']['application']['tags'].keys()).pop()
|
||||
|
||||
default = datastore.data['settings']['application']['tags'].get(uuid)
|
||||
|
||||
form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
|
||||
data=default,
|
||||
)
|
||||
# @todo subclass form so validation works
|
||||
#if not form.validate():
|
||||
# for widget, l in form.errors.items():
|
||||
# flash(','.join(l), 'error')
|
||||
# return redirect(url_for('tags.form_tag_edit_submit', uuid=uuid))
|
||||
|
||||
datastore.data['settings']['application']['tags'][uuid].update(form.data)
|
||||
datastore.needs_write_urgent = True
|
||||
flash("Updated")
|
||||
|
||||
return redirect(url_for('tags.tags_overview_page'))
|
||||
|
||||
|
||||
@tags_blueprint.route("/delete/<string:uuid>", methods=['GET'])
|
||||
def form_tag_delete(uuid):
|
||||
return redirect(url_for('tags.tags_overview_page'))
|
||||
return tags_blueprint
|
||||
changedetectionio/blueprint/tags/form.py (new file, 22 lines)
@@ -0,0 +1,22 @@
from wtforms import (
    BooleanField,
    Form,
    IntegerField,
    RadioField,
    SelectField,
    StringField,
    SubmitField,
    TextAreaField,
    validators,
)


class SingleTag(Form):

    name = StringField('Tag name', [validators.InputRequired()], render_kw={"placeholder": "Name"})
    save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
changedetectionio/blueprint/tags/templates/edit-tag.html (new file, 135 lines)
@@ -0,0 +1,135 @@
|
||||
{% extends 'base.html' %}
|
||||
{% block content %}
|
||||
{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
|
||||
{% from '_common_fields.jinja' import render_common_settings_form %}
|
||||
<script>
|
||||
const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
|
||||
</script>
|
||||
|
||||
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<script>
|
||||
|
||||
/*{% if emailprefix %}*/
|
||||
/*const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');*/
|
||||
/*{% endif %}*/
|
||||
|
||||
|
||||
</script>
|
||||
|
||||
<script src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script>
|
||||
<!--<script src="{{url_for('static_content', group='js', filename='limit.js')}}" defer></script>-->
|
||||
<script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>
|
||||
|
||||
<div class="edit-form monospaced-textarea">
|
||||
|
||||
<div class="tabs collapsable">
|
||||
<ul>
|
||||
<li class="tab" id=""><a href="#general">General</a></li>
|
||||
<li class="tab"><a href="#filters-and-triggers">Filters & Triggers</a></li>
|
||||
<li class="tab"><a href="#notifications">Notifications</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="box-wrap inner">
|
||||
<form class="pure-form pure-form-stacked"
|
||||
action="{{ url_for('tags.form_tag_edit', uuid=data.uuid) }}" method="POST">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
|
||||
<div class="tab-pane-inner" id="general">
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.title, placeholder="https://...", required=true, class="m-d") }}
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="filters-and-triggers">
|
||||
<div class="pure-control-group">
|
||||
{% set field = render_field(form.include_filters,
|
||||
rows=5,
|
||||
placeholder="#example
|
||||
xpath://body/div/span[contains(@class, 'example-class')]",
|
||||
class="m-d")
|
||||
%}
|
||||
{{ field }}
|
||||
{% if '/text()' in field %}
|
||||
<span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the <element> contains <![CDATA[]]></strong></span><br>
|
||||
{% endif %}
|
||||
<span class="pure-form-message-inline">One rule per line, <i>any</i> rules that matches will be used.<br>
|
||||
|
||||
<ul>
|
||||
<li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
|
||||
<li>JSON - Limit text to this JSON rule, using either <a href="https://pypi.org/project/jsonpath-ng/" target="new">JSONPath</a> or <a href="https://stedolan.github.io/jq/" target="new">jq</a> (if installed).
|
||||
<ul>
|
||||
<li>JSONPath: Prefix with <code>json:</code>, use <code>json:$</code> to force re-formatting if required, <a href="https://jsonpath.com/" target="new">test your JSONPath here</a>.</li>
|
||||
{% if jq_support %}
|
||||
<li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>.</li>
|
||||
{% else %}
|
||||
<li>jq support not installed</li>
|
||||
{% endif %}
|
||||
</ul>
|
||||
</li>
|
||||
<li>XPath - Limit text to this XPath rule, simply start with a forward-slash,
|
||||
<ul>
|
||||
<li>Example: <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
|
||||
href="http://xpather.com/" target="new">test your XPath here</a></li>
|
||||
<li>Example: Get all titles from an RSS feed <code>//title/text()</code></li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
Please be sure that you thoroughly understand how to write CSS, JSONPath, XPath{% if jq_support %}, or jq selector{%endif%} rules before filing an issue on GitHub! <a
|
||||
href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br>
|
||||
</span>
|
||||
</div>
|
||||
<fieldset class="pure-control-group">
|
||||
{{ render_field(form.subtractive_selectors, rows=5, placeholder="header
|
||||
footer
|
||||
nav
|
||||
.stockticker") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li> Remove HTML element(s) by CSS selector before text conversion. </li>
|
||||
<li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li>
|
||||
</ul>
|
||||
</span>
|
||||
</fieldset>
|
||||
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="notifications">
|
||||
<fieldset>
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_checkbox_field(form.notification_muted) }}
|
||||
</div>
|
||||
{% if is_html_webdriver %}
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_checkbox_field(form.notification_screenshot) }}
|
||||
<span class="pure-form-message-inline">
|
||||
<strong>Use with caution!</strong> This will easily fill up your email storage quota or flood other storages.
|
||||
</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="field-group" id="notification-field-group">
|
||||
{% if has_default_notification_urls %}
|
||||
<div class="inline-warning">
|
||||
<img class="inline-warning-icon" src="{{url_for('static_content', group='images', filename='notice.svg')}}" alt="Look out!" title="Lookout!" >
|
||||
There are <a href="{{ url_for('settings_page')}}#notifications">system-wide notification URLs enabled</a>, this form will override notification settings for this watch only ‐ an empty Notification URL list here will still send notifications.
|
||||
</div>
|
||||
{% endif %}
|
||||
<a href="#notifications" id="notification-setting-reset-to-default" class="pure-button button-xsmall" style="right: 20px; top: 20px; position: absolute; background-color: #5f42dd; border-radius: 4px; font-size: 70%; color: #fff">Use system defaults</a>
|
||||
|
||||
{{ render_common_settings_form(form, emailprefix, settings_application) }}
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div id="actions">
|
||||
<div class="pure-control-group">
|
||||
{{ render_button(form.save_button) }}
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
||||
@@ -0,0 +1,60 @@
|
||||
{% extends 'base.html' %}
|
||||
{% block content %}
|
||||
{% from '_helpers.jinja' import render_simple_field, render_field %}
|
||||
<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
|
||||
|
||||
<div class="box">
|
||||
<form class="pure-form" action="{{ url_for('tags.form_tag_add') }}" method="POST" id="new-watch-form">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
|
||||
<fieldset>
|
||||
<legend>Add a new organisational tag</legend>
|
||||
<div id="watch-add-wrapper-zone">
|
||||
<div>
|
||||
{{ render_simple_field(form.name, placeholder="watch label / tag") }}
|
||||
</div>
|
||||
<div>
|
||||
{{ render_simple_field(form.save_button, title="Save" ) }}
|
||||
</div>
|
||||
</div>
|
||||
<br>
|
||||
<div style="color: #fff;">Groups allow you to manage filters and notifications for multiple watches under a single organisational tag.</div>
|
||||
</fieldset>
|
||||
</form>
|
||||
<!-- @todo maybe some overview matrix, 'tick' with which has notification, filter rules etc -->
|
||||
<div id="watch-table-wrapper">
|
||||
|
||||
<table class="pure-table pure-table-striped watch-table group-overview-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th></th>
|
||||
<th>Tag / Label name</th>
|
||||
<th></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<!--
|
||||
@Todo - connect Last checked, Last Changed, Number of Watches etc
|
||||
--->
|
||||
{% if not available_tags|length %}
|
||||
<tr>
|
||||
<td colspan="3">No website organisational tags/groups configured</td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
{% for uuid, tag in available_tags.items() %}
|
||||
<tr id="{{ uuid }}" class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }}">
|
||||
<td class="watch-controls">
|
||||
<a class="link-mute state-{{'on' if tag.notification_muted else 'off'}}" href="{{url_for('tags.mute', uuid=tag.uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications" class="icon icon-mute" ></a>
|
||||
</td>
|
||||
<td class="title-col inline">{{tag.title}}</td>
|
||||
<td>
|
||||
<a class="pure-button pure-button-primary" href="{{ url_for('tags.form_tag_edit', uuid=uuid) }}">Edit</a>
|
||||
<a class="pure-button pure-button-primary" href="{{ url_for('tags.delete', uuid=uuid) }}" title="Deletes and removes tag">Delete</a>
|
||||
<a class="pure-button pure-button-primary" href="{{ url_for('tags.unlink', uuid=uuid) }}" title="Keep the tag but unlink any watches">Unlink</a>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
@@ -3,11 +3,14 @@
|
||||
# Launch as an eventlet.wsgi server instance.
|
||||
|
||||
from distutils.util import strtobool
|
||||
from json.decoder import JSONDecodeError
|
||||
|
||||
import eventlet
|
||||
import eventlet.wsgi
|
||||
import getopt
|
||||
import os
|
||||
import signal
|
||||
import socket
|
||||
import sys
|
||||
|
||||
from . import store, changedetection_app, content_fetcher
|
||||
@@ -28,11 +31,13 @@ def sigterm_handler(_signo, _stack_frame):
|
||||
def main():
|
||||
global datastore
|
||||
global app
|
||||
ssl_mode = False
|
||||
host = ''
|
||||
port = os.environ.get('PORT') or 5000
|
||||
do_cleanup = False
|
||||
|
||||
datastore_path = None
|
||||
do_cleanup = False
|
||||
host = ''
|
||||
ipv6_enabled = False
|
||||
port = os.environ.get('PORT') or 5000
|
||||
ssl_mode = False
|
||||
|
||||
# On Windows, create and use a default path.
|
||||
if os.name == 'nt':
|
||||
@@ -43,7 +48,7 @@ def main():
|
||||
datastore_path = os.path.join(os.getcwd(), "../datastore")
|
||||
|
||||
try:
|
||||
opts, args = getopt.getopt(sys.argv[1:], "Ccsd:h:p:", "port")
|
||||
opts, args = getopt.getopt(sys.argv[1:], "6Ccsd:h:p:", "port")
|
||||
except getopt.GetoptError:
|
||||
print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path]')
|
||||
sys.exit(2)
|
||||
@@ -63,6 +68,10 @@ def main():
|
||||
if opt == '-d':
|
||||
datastore_path = arg
|
||||
|
||||
if opt == '-6':
|
||||
print ("Enabling IPv6 listen support")
|
||||
ipv6_enabled = True
|
||||
|
||||
# Cleanup (remove text files that aren't in the index)
|
||||
if opt == '-c':
|
||||
do_cleanup = True
|
||||
@@ -83,8 +92,14 @@ def main():
|
||||
"Or use the -C parameter to create the directory.".format(app_config['datastore_path']), file=sys.stderr)
|
||||
sys.exit(2)
|
||||
|
||||
try:
|
||||
datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=__version__)
|
||||
except JSONDecodeError as e:
|
||||
# Don't start if the JSON DB looks corrupt
|
||||
print ("ERROR: JSON DB or Proxy List JSON at '{}' appears to be corrupt, aborting".format(app_config['datastore_path']))
|
||||
print(str(e))
|
||||
return
|
||||
|
||||
datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=__version__)
|
||||
app = changedetection_app(app_config, datastore)
|
||||
|
||||
signal.signal(signal.SIGTERM, sigterm_handler)
|
||||
@@ -124,13 +139,15 @@ def main():
|
||||
from werkzeug.middleware.proxy_fix import ProxyFix
|
||||
app.wsgi_app = ProxyFix(app.wsgi_app, x_prefix=1, x_host=1)
|
||||
|
||||
s_type = socket.AF_INET6 if ipv6_enabled else socket.AF_INET
|
||||
|
||||
if ssl_mode:
|
||||
# @todo finalise SSL config, but this should get you in the right direction if you need it.
|
||||
eventlet.wsgi.server(eventlet.wrap_ssl(eventlet.listen((host, port)),
|
||||
eventlet.wsgi.server(eventlet.wrap_ssl(eventlet.listen((host, port), s_type),
|
||||
certfile='cert.pem',
|
||||
keyfile='privkey.pem',
|
||||
server_side=True), app)
|
||||
|
||||
else:
|
||||
eventlet.wsgi.server(eventlet.listen((host, int(port))), app)
|
||||
eventlet.wsgi.server(eventlet.listen((host, int(port)), s_type), app)
|
||||
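As a rough sketch of what the new `-6` switch changes above: the chosen address family is passed straight to `eventlet.listen()`, everything else stays the same. The host, port, and WSGI app below are placeholder values.

```python
# Sketch: selecting the socket family for the eventlet listener (placeholder values).
import socket
import eventlet
import eventlet.wsgi

ipv6_enabled = True
host, port = '', 5000

s_type = socket.AF_INET6 if ipv6_enabled else socket.AF_INET

def app(environ, start_response):
    # Trivial WSGI app so the example is self-contained
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'ok']

# eventlet.listen() takes the address family as its second argument; this call blocks.
eventlet.wsgi.server(eventlet.listen((host, int(port)), s_type), app)
```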
|
||||
|
||||
@@ -1,14 +1,19 @@
|
||||
from abc import abstractmethod
|
||||
from distutils.util import strtobool
|
||||
from urllib.parse import urlparse
|
||||
import chardet
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import requests
|
||||
import sys
|
||||
import time
|
||||
import urllib.parse
|
||||
|
||||
visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4, header, footer, section, article, aside, details, main, nav, section, summary'
|
||||
|
||||
|
||||
class Non200ErrorCodeReceived(Exception):
|
||||
def __init__(self, status_code, url, screenshot=None, xpath_data=None, page_html=None):
|
||||
# Set this so we can use it in other parts of the app
|
||||
@@ -24,6 +29,11 @@ class Non200ErrorCodeReceived(Exception):
|
||||
return
|
||||
|
||||
|
||||
class checksumFromPreviousCheckWasTheSame(Exception):
|
||||
def __init__(self):
|
||||
return
|
||||
|
||||
|
||||
class JSActionExceptions(Exception):
|
||||
def __init__(self, status_code, url, screenshot, message=''):
|
||||
self.status_code = status_code
|
||||
@@ -32,6 +42,7 @@ class JSActionExceptions(Exception):
|
||||
self.message = message
|
||||
return
|
||||
|
||||
|
||||
class BrowserStepsStepTimout(Exception):
|
||||
def __init__(self, step_n):
|
||||
self.step_n = step_n
|
||||
@@ -39,7 +50,7 @@ class BrowserStepsStepTimout(Exception):
|
||||
|
||||
|
||||
class PageUnloadable(Exception):
|
||||
def __init__(self, status_code, url, screenshot=False, message=False):
|
||||
def __init__(self, status_code, url, message, screenshot=False):
|
||||
# Set this so we can use it in other parts of the app
|
||||
self.status_code = status_code
|
||||
self.url = url
|
||||
@@ -47,6 +58,7 @@ class PageUnloadable(Exception):
|
||||
self.message = message
|
||||
return
|
||||
|
||||
|
||||
class EmptyReply(Exception):
|
||||
def __init__(self, status_code, url, screenshot=None):
|
||||
# Set this so we can use it in other parts of the app
|
||||
@@ -55,6 +67,7 @@ class EmptyReply(Exception):
|
||||
self.screenshot = screenshot
|
||||
return
|
||||
|
||||
|
||||
class ScreenshotUnavailable(Exception):
|
||||
def __init__(self, status_code, url, page_html=None):
|
||||
# Set this so we can use it in other parts of the app
|
||||
@@ -65,27 +78,31 @@ class ScreenshotUnavailable(Exception):
|
||||
self.page_text = html_to_text(page_html)
|
||||
return
|
||||
|
||||
|
||||
class ReplyWithContentButNoText(Exception):
|
||||
def __init__(self, status_code, url, screenshot=None):
|
||||
def __init__(self, status_code, url, screenshot=None, has_filters=False, html_content=''):
|
||||
# Set this so we can use it in other parts of the app
|
||||
self.status_code = status_code
|
||||
self.url = url
|
||||
self.screenshot = screenshot
|
||||
self.has_filters = has_filters
|
||||
self.html_content = html_content
|
||||
return
|
||||
|
||||
|
||||
class Fetcher():
|
||||
error = None
|
||||
status_code = None
|
||||
content = None
|
||||
headers = None
|
||||
browser_steps = None
|
||||
browser_steps_screenshot_path = None
|
||||
|
||||
content = None
|
||||
error = None
|
||||
fetcher_description = "No description"
|
||||
headers = {}
|
||||
status_code = None
|
||||
webdriver_js_execute_code = None
|
||||
xpath_element_js = ""
|
||||
|
||||
xpath_data = None
|
||||
xpath_element_js = ""
|
||||
instock_data = None
|
||||
instock_data_js = ""
|
||||
|
||||
# Will be needed in the future by the VisualSelector, always get this where possible.
|
||||
screenshot = False
|
||||
@@ -99,7 +116,7 @@ class Fetcher():
|
||||
from pkg_resources import resource_string
|
||||
# The code that scrapes elements and makes a list of elements/size/position to click on in the VisualSelector
|
||||
self.xpath_element_js = resource_string(__name__, "res/xpath_element_scraper.js").decode('utf-8')
|
||||
|
||||
self.instock_data_js = resource_string(__name__, "res/stock-not-in-stock.js").decode('utf-8')
|
||||
|
||||
@abstractmethod
|
||||
def get_error(self):
|
||||
@@ -113,7 +130,8 @@ class Fetcher():
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False,
|
||||
current_include_filters=None):
|
||||
current_include_filters=None,
|
||||
is_binary=False):
|
||||
# Should set self.error, self.status_code and self.content
|
||||
pass
|
||||
|
||||
@@ -134,6 +152,23 @@ class Fetcher():
|
||||
def is_ready(self):
|
||||
return True
|
||||
|
||||
def get_all_headers(self):
|
||||
"""
|
||||
Get all headers but ensure all keys are lowercase
|
||||
:return:
|
||||
"""
|
||||
return {k.lower(): v for k, v in self.headers.items()}
|
||||
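A short usage note for the new `get_all_headers()` helper: by lower-casing the keys, callers can look up headers regardless of how the server capitalised them. The header values below are made up.

```python
# Case-insensitive header lookup using the lower-cased copy get_all_headers() returns.
headers = {'Content-Type': 'text/html; charset=utf-8', 'X-Cache': 'HIT'}

lowered = {k.lower(): v for k, v in headers.items()}   # same transform as get_all_headers()

print(lowered.get('content-type'))   # text/html; charset=utf-8
print(lowered.get('x-cache'))        # HIT
```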
|
||||
def browser_steps_get_valid_steps(self):
|
||||
if self.browser_steps is not None and len(self.browser_steps):
|
||||
valid_steps = filter(
|
||||
lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
|
||||
self.browser_steps)
|
||||
|
||||
return valid_steps
|
||||
|
||||
return None
|
||||
|
||||
def iterate_browser_steps(self):
|
||||
from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface
|
||||
from playwright._impl._api_types import TimeoutError
|
||||
@@ -145,14 +180,13 @@ class Fetcher():
|
||||
if self.browser_steps is not None and len(self.browser_steps):
|
||||
interface = steppable_browser_interface()
|
||||
interface.page = self.page
|
||||
|
||||
valid_steps = filter(lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'), self.browser_steps)
|
||||
valid_steps = self.browser_steps_get_valid_steps()
|
||||
|
||||
for step in valid_steps:
|
||||
step_n += 1
|
||||
print(">> Iterating check - browser Step n {} - {}...".format(step_n, step['operation']))
|
||||
self.screenshot_step("before-"+str(step_n))
|
||||
self.save_step_html("before-"+str(step_n))
|
||||
self.screenshot_step("before-" + str(step_n))
|
||||
self.save_step_html("before-" + str(step_n))
|
||||
try:
|
||||
optional_value = step['optional_value']
|
||||
selector = step['selector']
|
||||
@@ -167,12 +201,11 @@ class Fetcher():
|
||||
optional_value=optional_value)
|
||||
self.screenshot_step(step_n)
|
||||
self.save_step_html(step_n)
|
||||
except TimeoutError:
|
||||
except TimeoutError as e:
|
||||
print(str(e))
|
||||
# Stop processing here
|
||||
raise BrowserStepsStepTimout(step_n=step_n)
|
||||
|
||||
|
||||
|
||||
# It's always good to reset these
|
||||
def delete_browser_steps_screenshots(self):
|
||||
import glob
|
||||
@@ -180,7 +213,9 @@ class Fetcher():
|
||||
dest = os.path.join(self.browser_steps_screenshot_path, 'step_*.jpeg')
|
||||
files = glob.glob(dest)
|
||||
for f in files:
|
||||
os.unlink(f)
|
||||
if os.path.isfile(f):
|
||||
os.unlink(f)
|
||||
|
||||
|
||||
# Maybe for the future, each fetcher provides its own diff output, could be used for text, image
|
||||
# the current one would return javascript output (as we use JS to generate the diff)
|
||||
@@ -199,6 +234,7 @@ def available_fetchers():
|
||||
|
||||
return p
|
||||
|
||||
|
||||
class base_html_playwright(Fetcher):
|
||||
fetcher_description = "Playwright {}/Javascript".format(
|
||||
os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').capitalize()
|
||||
@@ -238,10 +274,14 @@ class base_html_playwright(Fetcher):
|
||||
if proxy_override:
|
||||
self.proxy = {'server': proxy_override}
|
||||
|
||||
def screenshot_step(self, step_n=''):
|
||||
if self.proxy:
|
||||
# Playwright needs separate username and password values
|
||||
parsed = urlparse(self.proxy.get('server'))
|
||||
if parsed.username:
|
||||
self.proxy['username'] = parsed.username
|
||||
self.proxy['password'] = parsed.password
|
||||
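Playwright expects proxy credentials as separate fields, so they are split out of the server URL with `urlparse` as above; a hypothetical proxy URL is used in this sketch.

```python
# Splitting credentials out of a proxy server URL (hypothetical URL for illustration).
from urllib.parse import urlparse

proxy = {'server': 'http://someuser:somepass@proxy.example.com:3128'}

parsed = urlparse(proxy.get('server'))
if parsed.username:
    proxy['username'] = parsed.username
    proxy['password'] = parsed.password

print(proxy['username'], proxy['password'])   # someuser somepass
```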
|
||||
# There's a bug where we need to do it twice or it doesn't take the whole page, don't know why.
|
||||
self.page.screenshot(type='jpeg', clip={'x': 1.0, 'y': 1.0, 'width': 1280, 'height': 1024})
|
||||
def screenshot_step(self, step_n=''):
|
||||
screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=85)
|
||||
|
||||
if self.browser_steps_screenshot_path is not None:
|
||||
@@ -257,6 +297,117 @@ class base_html_playwright(Fetcher):
|
||||
with open(destination, 'w') as f:
|
||||
f.write(content)
|
||||
|
||||
def run_fetch_browserless_puppeteer(self,
|
||||
url,
|
||||
timeout,
|
||||
request_headers,
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False,
|
||||
current_include_filters=None,
|
||||
is_binary=False):
|
||||
|
||||
from pkg_resources import resource_string
|
||||
|
||||
extra_wait_ms = (int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay) * 1000
|
||||
|
||||
self.xpath_element_js = self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors)
|
||||
code = resource_string(__name__, "res/puppeteer_fetch.js").decode('utf-8')
|
||||
# In the future, inject this as a proper JS package
|
||||
code = code.replace('%xpath_scrape_code%', self.xpath_element_js)
|
||||
code = code.replace('%instock_scrape_code%', self.instock_data_js)
|
||||
|
||||
from requests.exceptions import ConnectTimeout, ReadTimeout
|
||||
wait_browserless_seconds = 240
|
||||
|
||||
browserless_function_url = os.getenv('BROWSERLESS_FUNCTION_URL')
|
||||
from urllib.parse import urlparse
|
||||
if not browserless_function_url:
|
||||
# Convert/try to guess from PLAYWRIGHT_DRIVER_URL
|
||||
o = urlparse(os.getenv('PLAYWRIGHT_DRIVER_URL'))
|
||||
browserless_function_url = o._replace(scheme="http")._replace(path="function").geturl()
|
||||
|
||||
|
||||
# Append proxy connect string
|
||||
if self.proxy:
|
||||
# Remove the username/password if it exists in the URL, or you will receive an "ERR_NO_SUPPORTED_PROXIES" error
|
||||
# Actual authentication handled by Puppeteer/node
|
||||
o = urlparse(self.proxy.get('server'))
|
||||
proxy_url = urllib.parse.quote(o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl())
|
||||
browserless_function_url = f"{browserless_function_url}&--proxy-server={proxy_url}"
|
||||
|
||||
try:
|
||||
amp = '&' if '?' in browserless_function_url else '?'
|
||||
response = requests.request(
|
||||
method="POST",
|
||||
json={
|
||||
"code": code,
|
||||
"context": {
|
||||
# Very primitive disk cache - USE WITH EXTREME CAUTION
|
||||
# Run browserless container with -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]"
|
||||
'disk_cache_dir': os.getenv("PUPPETEER_DISK_CACHE", False), # or path to disk cache ending in /, ie /tmp/cache/
|
||||
'execute_js': self.webdriver_js_execute_code,
|
||||
'extra_wait_ms': extra_wait_ms,
|
||||
'include_filters': current_include_filters,
|
||||
'req_headers': request_headers,
|
||||
'screenshot_quality': int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)),
|
||||
'url': url,
|
||||
'user_agent': {k.lower(): v for k, v in request_headers.items()}.get('user-agent', None),
|
||||
'proxy_username': self.proxy.get('username', '') if self.proxy else False,
|
||||
'proxy_password': self.proxy.get('password', '') if self.proxy and self.proxy.get('username') else False,
|
||||
'no_cache_list': [
|
||||
'twitter',
|
||||
'.pdf'
|
||||
],
|
||||
# Could use https://github.com/easylist/easylist here, or install a plugin
|
||||
'block_url_list': [
|
||||
'adnxs.com',
|
||||
'analytics.twitter.com',
|
||||
'doubleclick.net',
|
||||
'google-analytics.com',
|
||||
'googletagmanager',
|
||||
'trustpilot.com'
|
||||
]
|
||||
}
|
||||
},
|
||||
# @todo /function needs adding ws:// to http:// rebuild this
|
||||
url=browserless_function_url+f"{amp}--disable-features=AudioServiceOutOfProcess&dumpio=true&--disable-remote-fonts",
|
||||
timeout=wait_browserless_seconds)
|
||||
|
||||
except ReadTimeout:
|
||||
raise PageUnloadable(url=url, status_code=None, message=f"No response from browserless in {wait_browserless_seconds}s")
|
||||
except ConnectTimeout:
|
||||
raise PageUnloadable(url=url, status_code=None, message=f"Timed out connecting to browserless, retrying..")
|
||||
else:
|
||||
# 200 here only means that the communication to browserless worked, not the page state
|
||||
if response.status_code == 200:
|
||||
import base64
|
||||
|
||||
x = response.json()
|
||||
if not x.get('screenshot'):
|
||||
# https://github.com/puppeteer/puppeteer/blob/v1.0.0/docs/troubleshooting.md#tips
|
||||
# https://github.com/puppeteer/puppeteer/issues/1834
|
||||
# https://github.com/puppeteer/puppeteer/issues/1834#issuecomment-381047051
|
||||
# Check your memory is shared and big enough
|
||||
raise ScreenshotUnavailable(url=url, status_code=None)
|
||||
|
||||
if not x.get('content', '').strip():
|
||||
raise EmptyReply(url=url, status_code=None)
|
||||
|
||||
if x.get('status_code', 200) != 200 and not ignore_status_codes:
|
||||
raise Non200ErrorCodeReceived(url=url, status_code=x.get('status_code', 200), page_html=x['content'])
|
||||
|
||||
self.content = x.get('content')
|
||||
self.headers = x.get('headers')
|
||||
self.instock_data = x.get('instock_data')
|
||||
self.screenshot = base64.b64decode(x.get('screenshot'))
|
||||
self.status_code = x.get('status_code')
|
||||
self.xpath_data = x.get('xpath_data')
|
||||
|
||||
else:
|
||||
# Some other error from browserless
|
||||
raise PageUnloadable(url=url, status_code=None, message=response.content.decode('utf-8'))
|
||||
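For orientation, a stripped-down sketch of the browserless `/function` call used above: a JavaScript snippet plus a context object is POSTed, and the JSON reply is whatever the function returned. The endpoint URL, payload, and returned fields here are illustrative assumptions, not the exact values the fetcher builds.

```python
# Sketch of posting a function to a browserless instance (assumed local endpoint and payload).
import requests

browserless_function_url = "http://localhost:3000/function"   # assumption: local container

# Hypothetical puppeteer function body; the real one lives in res/puppeteer_fetch.js
code = (
    "module.exports = async ({ page, context }) => {"
    "  await page.goto(context.url);"
    "  return { data: { content: await page.content() }, type: 'application/json' };"
    "};"
)

response = requests.request(
    method="POST",
    url=browserless_function_url,
    json={"code": code, "context": {"url": "https://example.com"}},
    timeout=240,
)

if response.status_code == 200:
    # 200 only means browserless accepted and ran the function, not that the page was healthy
    print(response.json())
else:
    print("browserless error:", response.status_code, response.content[:200])
```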
|
||||
def run(self,
|
||||
url,
|
||||
timeout,
|
||||
@@ -264,13 +415,33 @@ class base_html_playwright(Fetcher):
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False,
|
||||
current_include_filters=None):
|
||||
current_include_filters=None,
|
||||
is_binary=False):
|
||||
|
||||
# USE_EXPERIMENTAL_PUPPETEER_FETCH is not supported by watches with BrowserSteps (for now!)
|
||||
has_browser_steps = self.browser_steps and list(filter(
|
||||
lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
|
||||
self.browser_steps))
|
||||
|
||||
if not has_browser_steps and os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH'):
|
||||
if strtobool(os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH')):
|
||||
# Temporary backup solution until we rewrite the playwright code
|
||||
return self.run_fetch_browserless_puppeteer(
|
||||
url,
|
||||
timeout,
|
||||
request_headers,
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes,
|
||||
current_include_filters,
|
||||
is_binary)
|
||||
|
||||
from playwright.sync_api import sync_playwright
|
||||
import playwright._impl._api_types
|
||||
|
||||
self.delete_browser_steps_screenshots()
|
||||
response = None
|
||||
|
||||
with sync_playwright() as p:
|
||||
browser_type = getattr(p, self.browser_type)
|
||||
|
||||
@@ -279,13 +450,18 @@ class base_html_playwright(Fetcher):
|
||||
# 60,000 connection timeout only
|
||||
browser = browser_type.connect_over_cdp(self.command_executor, timeout=60000)
|
||||
|
||||
# SOCKS5 with authentication is not supported (yet)
|
||||
# https://github.com/microsoft/playwright/issues/10567
|
||||
|
||||
# Set user agent to prevent Cloudflare from blocking the browser
|
||||
# Use the default one configured in the App.py model that's passed from fetch_site_status.py
|
||||
context = browser.new_context(
|
||||
user_agent=request_headers['User-Agent'] if request_headers.get('User-Agent') else 'Mozilla/5.0',
|
||||
user_agent={k.lower(): v for k, v in request_headers.items()}.get('user-agent', None),
|
||||
proxy=self.proxy,
|
||||
# This is needed to enable JavaScript execution on GitHub and others
|
||||
bypass_csp=True,
|
||||
# Should be `allow` or `block` - sites like YouTube can transmit large amounts of data via Service Workers
|
||||
service_workers=os.getenv('PLAYWRIGHT_SERVICE_WORKERS', 'allow'),
|
||||
# Should never be needed
|
||||
accept_downloads=False
|
||||
)
|
||||
@@ -294,74 +470,57 @@ class base_html_playwright(Fetcher):
|
||||
if len(request_headers):
|
||||
context.set_extra_http_headers(request_headers)
|
||||
|
||||
# Listen for all console events and handle errors
|
||||
self.page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))
|
||||
|
||||
# Re-use as much code from browser steps as possible so it's the same
|
||||
from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface
|
||||
browsersteps_interface = steppable_browser_interface()
|
||||
browsersteps_interface.page = self.page
|
||||
|
||||
response = browsersteps_interface.action_goto_url(value=url)
|
||||
self.headers = response.all_headers()
|
||||
|
||||
if response is None:
|
||||
context.close()
|
||||
browser.close()
|
||||
print("Content Fetcher > Response object was none")
|
||||
raise EmptyReply(url=url, status_code=None)
|
||||
|
||||
try:
|
||||
self.page.set_default_navigation_timeout(90000)
|
||||
self.page.set_default_timeout(90000)
|
||||
|
||||
# Listen for all console events and handle errors
|
||||
self.page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))
|
||||
|
||||
# Bug - never set viewport size BEFORE page.goto
|
||||
|
||||
|
||||
# Waits for the next navigation. Using Python context manager
|
||||
# prevents a race condition between clicking and waiting for a navigation.
|
||||
with self.page.expect_navigation():
|
||||
response = self.page.goto(url, wait_until='load')
|
||||
# Wait_until = commit
|
||||
# - `'commit'` - consider operation to be finished when network response is received and the document started loading.
|
||||
# Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
|
||||
# This seemed to solve nearly all 'TimeoutErrors'
|
||||
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
|
||||
self.page.wait_for_timeout(extra_wait * 1000)
|
||||
|
||||
if self.webdriver_js_execute_code is not None and len(self.webdriver_js_execute_code):
|
||||
self.page.evaluate(self.webdriver_js_execute_code)
|
||||
|
||||
browsersteps_interface.action_execute_js(value=self.webdriver_js_execute_code, selector=None)
|
||||
except playwright._impl._api_types.TimeoutError as e:
|
||||
context.close()
|
||||
browser.close()
|
||||
# This can be ok, we will try to grab what we could retrieve
|
||||
pass
|
||||
except Exception as e:
|
||||
print ("other exception when page.goto")
|
||||
print (str(e))
|
||||
print("Content Fetcher > Other exception when executing custom JS code", str(e))
|
||||
context.close()
|
||||
browser.close()
|
||||
raise PageUnloadable(url=url, status_code=None)
|
||||
|
||||
|
||||
if response is None:
|
||||
context.close()
|
||||
browser.close()
|
||||
print ("response object was none")
|
||||
raise EmptyReply(url=url, status_code=None)
|
||||
|
||||
# Bug 2(?) Set the viewport size AFTER loading the page
|
||||
self.page.set_viewport_size({"width": 1280, "height": 1024})
|
||||
|
||||
# Run Browser Steps here
|
||||
self.iterate_browser_steps()
|
||||
raise PageUnloadable(url=url, status_code=None, message=str(e))
|
||||
|
||||
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
|
||||
time.sleep(extra_wait)
|
||||
self.page.wait_for_timeout(extra_wait * 1000)
|
||||
|
||||
|
||||
self.content = self.page.content()
|
||||
self.status_code = response.status
|
||||
|
||||
if self.status_code != 200 and not ignore_status_codes:
|
||||
raise Non200ErrorCodeReceived(url=url, status_code=self.status_code)
|
||||
|
||||
if len(self.page.content().strip()) == 0:
|
||||
context.close()
|
||||
browser.close()
|
||||
print ("Content was empty")
|
||||
raise EmptyReply(url=url, status_code=None)
|
||||
print("Content Fetcher > Content was empty")
|
||||
raise EmptyReply(url=url, status_code=response.status)
|
||||
|
||||
# Bug 2(?) Set the viewport size AFTER loading the page
|
||||
self.page.set_viewport_size({"width": 1280, "height": 1024})
|
||||
|
||||
self.status_code = response.status
|
||||
self.content = self.page.content()
|
||||
self.headers = response.all_headers()
|
||||
# Run Browser Steps here
|
||||
if self.browser_steps_get_valid_steps():
|
||||
self.iterate_browser_steps()
|
||||
|
||||
self.page.wait_for_timeout(extra_wait * 1000)
|
||||
|
||||
# So we can find an element on the page where its selector was entered manually (maybe not xPath etc)
|
||||
if current_include_filters is not None:
|
||||
@@ -369,8 +528,11 @@ class base_html_playwright(Fetcher):
|
||||
else:
|
||||
self.page.evaluate("var include_filters=''")
|
||||
|
||||
self.xpath_data = self.page.evaluate("async () => {" + self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) + "}")
|
||||
self.xpath_data = self.page.evaluate(
|
||||
"async () => {" + self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) + "}")
|
||||
self.instock_data = self.page.evaluate("async () => {" + self.instock_data_js + "}")
|
||||
|
||||
self.content = self.page.content()
|
||||
# Bug 3 in Playwright screenshot handling
|
||||
# Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it
|
||||
# JPEG is better here because the screenshots can be very very large
|
||||
@@ -379,18 +541,18 @@ class base_html_playwright(Fetcher):
|
||||
# which will significantly increase the IO size between the server and client, it's recommended to use the lowest
|
||||
# acceptable screenshot quality here
|
||||
try:
|
||||
# Quality set to 1 because it's not used, just used as a work-around for a bug, no need to change this.
|
||||
self.page.screenshot(type='jpeg', clip={'x': 1.0, 'y': 1.0, 'width': 1280, 'height': 1024}, quality=1)
|
||||
# The actual screenshot
|
||||
self.screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)))
|
||||
self.screenshot = self.page.screenshot(type='jpeg', full_page=True,
|
||||
quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)))
|
||||
except Exception as e:
|
||||
context.close()
|
||||
browser.close()
|
||||
raise ScreenshotUnavailable(url=url, status_code=None)
|
||||
raise ScreenshotUnavailable(url=url, status_code=response.status_code)
|
||||
|
||||
context.close()
|
||||
browser.close()
|
||||
|
||||
|
||||
class base_html_webdriver(Fetcher):
|
||||
if os.getenv("WEBDRIVER_URL"):
|
||||
fetcher_description = "WebDriver Chrome/Javascript via '{}'".format(os.getenv("WEBDRIVER_URL"))
|
||||
@@ -440,18 +602,21 @@ class base_html_webdriver(Fetcher):
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False,
|
||||
current_include_filters=None):
|
||||
current_include_filters=None,
|
||||
is_binary=False):
|
||||
|
||||
from selenium import webdriver
|
||||
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
|
||||
from selenium.webdriver.chrome.options import Options as ChromeOptions
|
||||
from selenium.common.exceptions import WebDriverException
|
||||
# request_body, request_method unused for now, until some magic in the future happens.
|
||||
|
||||
# check env for WEBDRIVER_URL
|
||||
options = ChromeOptions()
|
||||
if self.proxy:
|
||||
options.proxy = self.proxy
|
||||
|
||||
self.driver = webdriver.Remote(
|
||||
command_executor=self.command_executor,
|
||||
desired_capabilities=DesiredCapabilities.CHROME,
|
||||
proxy=self.proxy)
|
||||
options=options)
|
||||
|
||||
try:
|
||||
self.driver.get(url)
|
||||
@@ -483,11 +648,11 @@ class base_html_webdriver(Fetcher):
|
||||
# Does the connection to the webdriver work? run a test connection.
|
||||
def is_ready(self):
|
||||
from selenium import webdriver
|
||||
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
|
||||
from selenium.webdriver.chrome.options import Options as ChromeOptions
|
||||
|
||||
self.driver = webdriver.Remote(
|
||||
command_executor=self.command_executor,
|
||||
desired_capabilities=DesiredCapabilities.CHROME)
|
||||
options=ChromeOptions())
|
||||
|
||||
# driver.quit() seems to cause better exceptions
|
||||
self.quit()
|
||||
@@ -498,7 +663,7 @@ class base_html_webdriver(Fetcher):
|
||||
try:
|
||||
self.driver.quit()
|
||||
except Exception as e:
|
||||
print("Exception in chrome shutdown/quit" + str(e))
|
||||
print("Content Fetcher > Exception in chrome shutdown/quit" + str(e))
|
||||
|
||||
|
||||
# "html_requests" is listed as the default fetcher in store.py!
|
||||
@@ -515,16 +680,21 @@ class html_requests(Fetcher):
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False,
|
||||
current_include_filters=None):
|
||||
current_include_filters=None,
|
||||
is_binary=False):
|
||||
|
||||
# Make requests use a more modern looking user-agent
|
||||
if not 'User-Agent' in request_headers:
|
||||
if not {k.lower(): v for k, v in request_headers.items()}.get('user-agent', None):
|
||||
request_headers['User-Agent'] = os.getenv("DEFAULT_SETTINGS_HEADERS_USERAGENT",
|
||||
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36')
|
||||
|
||||
proxies = {}
|
||||
|
||||
# Allows overriding the proxy on a per-request basis
|
||||
|
||||
# https://requests.readthedocs.io/en/latest/user/advanced/#socks
|
||||
# Should also work with `socks5://user:pass@host:port` type syntax.
|
||||
|
||||
if self.proxy_override:
|
||||
proxies = {'http': self.proxy_override, 'https': self.proxy_override, 'ftp': self.proxy_override}
|
||||
else:
|
||||
@@ -545,10 +715,12 @@ class html_requests(Fetcher):
|
||||
# For example - some sites don't tell us it's utf-8, but return utf-8 content
|
||||
# This seems to not occur when using webdriver/selenium, it seems to detect the text encoding more reliably.
|
||||
# https://github.com/psf/requests/issues/1604 good info about requests encoding detection
|
||||
if not r.headers.get('content-type') or not 'charset=' in r.headers.get('content-type'):
|
||||
encoding = chardet.detect(r.content)['encoding']
|
||||
if encoding:
|
||||
r.encoding = encoding
|
||||
if not is_binary:
|
||||
# Don't run this for PDFs (and requests identified as binary), it takes a _long_ time
|
||||
if not r.headers.get('content-type') or not 'charset=' in r.headers.get('content-type'):
|
||||
encoding = chardet.detect(r.content)['encoding']
|
||||
if encoding:
|
||||
r.encoding = encoding
|
||||
|
||||
if not r.content or not len(r.content):
|
||||
raise EmptyReply(url=url, status_code=r.status_code)
|
||||
@@ -560,8 +732,14 @@ class html_requests(Fetcher):
|
||||
raise Non200ErrorCodeReceived(url=url, status_code=r.status_code, page_html=r.text)
|
||||
|
||||
self.status_code = r.status_code
|
||||
self.content = r.text
|
||||
if is_binary:
|
||||
# Binary files just return their checksum until we add something smarter
|
||||
self.content = hashlib.md5(r.content).hexdigest()
|
||||
else:
|
||||
self.content = r.text
|
||||
|
||||
self.headers = r.headers
|
||||
self.raw_content = r.content
|
||||
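The two branches above can be summarised as: for text responses without a declared charset, sniff the encoding with `chardet`; for binary responses, reduce the body to an MD5 checksum so a change is still detectable. A rough sketch with an invented response body:

```python
# Sketch of the text vs. binary handling (invented response body and content type).
import hashlib
import chardet

raw_body = b'<html><body>In stock: 42 items</body></html>'
content_type = 'text/html'      # note: no charset= declared
is_binary = False

if not is_binary:
    encoding = 'utf-8'
    if 'charset=' not in content_type:
        detected = chardet.detect(raw_body)['encoding']   # e.g. 'ascii'
        if detected:
            encoding = detected
    content = raw_body.decode(encoding, errors='replace')
else:
    # Binary files just become their checksum until something smarter exists
    content = hashlib.md5(raw_body).hexdigest()

print(content)
```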
|
||||
|
||||
# Decide which is the 'real' HTML webdriver, this is more a system wide config
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
FROM python:3.8-slim
|
||||
|
||||
# https://stackoverflow.com/questions/58701233/docker-logs-erroneously-appears-empty-until-container-stops
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN [ ! -d "/datastore" ] && mkdir /datastore
|
||||
|
||||
COPY sleep.py /
|
||||
CMD [ "python", "/sleep.py" ]
|
||||
|
||||
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
import time
|
||||
|
||||
print ("Sleep loop, you should run your script from the console")
|
||||
|
||||
while True:
|
||||
# Wait for 2 seconds
|
||||
time.sleep(2)
|
||||
@@ -10,7 +10,7 @@ def same_slicer(l, a, b):
|
||||
return l[a:b]
|
||||
|
||||
# Like .compare but with slightly different output
|
||||
def customSequenceMatcher(before, after, include_equal=False):
|
||||
def customSequenceMatcher(before, after, include_equal=False, include_removed=True, include_added=True, include_replaced=True, include_change_type_prefix=True):
|
||||
cruncher = difflib.SequenceMatcher(isjunk=lambda x: x in " \\t", a=before, b=after)
|
||||
|
||||
# @todo Line-by-line mode instead of bunched, including `after` that is not in `before` (maybe unset?)
|
||||
@@ -18,35 +18,45 @@ def customSequenceMatcher(before, after, include_equal=False):
|
||||
if include_equal and tag == 'equal':
|
||||
g = before[alo:ahi]
|
||||
yield g
|
||||
elif tag == 'delete':
|
||||
g = ["(removed) " + i for i in same_slicer(before, alo, ahi)]
|
||||
elif include_removed and tag == 'delete':
|
||||
row_prefix = "(removed) " if include_change_type_prefix else ''
|
||||
g = [ row_prefix + i for i in same_slicer(before, alo, ahi)]
|
||||
yield g
|
||||
elif tag == 'replace':
|
||||
g = ["(changed) " + i for i in same_slicer(before, alo, ahi)]
|
||||
g += ["(into ) " + i for i in same_slicer(after, blo, bhi)]
|
||||
elif include_replaced and tag == 'replace':
|
||||
row_prefix = "(changed) " if include_change_type_prefix else ''
|
||||
g = [row_prefix + i for i in same_slicer(before, alo, ahi)]
|
||||
row_prefix = "(into) " if include_change_type_prefix else ''
|
||||
g += [row_prefix + i for i in same_slicer(after, blo, bhi)]
|
||||
yield g
|
||||
elif tag == 'insert':
|
||||
g = ["(added ) " + i for i in same_slicer(after, blo, bhi)]
|
||||
elif include_added and tag == 'insert':
|
||||
row_prefix = "(added) " if include_change_type_prefix else ''
|
||||
g = [row_prefix + i for i in same_slicer(after, blo, bhi)]
|
||||
yield g
|
||||
|
||||
# only_differences - only return info about the differences, no context
|
||||
# line_feed_sep could be "<br/>" or "<li>" or "\n" etc
|
||||
def render_diff(previous_file, newest_file, include_equal=False, line_feed_sep="\n"):
|
||||
with open(newest_file, 'r') as f:
|
||||
newest_version_file_contents = f.read()
|
||||
newest_version_file_contents = [line.rstrip() for line in newest_version_file_contents.splitlines()]
|
||||
# line_feed_sep could be "<br>" or "<li>" or "\n" etc
|
||||
def render_diff(previous_version_file_contents, newest_version_file_contents, include_equal=False, include_removed=True, include_added=True, include_replaced=True, line_feed_sep="\n", include_change_type_prefix=True, patch_format=False):
|
||||
|
||||
if previous_file:
|
||||
with open(previous_file, 'r') as f:
|
||||
previous_version_file_contents = f.read()
|
||||
previous_version_file_contents = [line.rstrip() for line in previous_version_file_contents.splitlines()]
|
||||
newest_version_file_contents = [line.rstrip() for line in newest_version_file_contents.splitlines()]
|
||||
|
||||
if previous_version_file_contents:
|
||||
previous_version_file_contents = [line.rstrip() for line in previous_version_file_contents.splitlines()]
|
||||
else:
|
||||
previous_version_file_contents = ""
|
||||
|
||||
rendered_diff = customSequenceMatcher(previous_version_file_contents,
|
||||
newest_version_file_contents,
|
||||
include_equal)
|
||||
if patch_format:
|
||||
patch = difflib.unified_diff(previous_version_file_contents, newest_version_file_contents)
|
||||
return line_feed_sep.join(patch)
|
||||
|
||||
rendered_diff = customSequenceMatcher(before=previous_version_file_contents,
|
||||
after=newest_version_file_contents,
|
||||
include_equal=include_equal,
|
||||
include_removed=include_removed,
|
||||
include_added=include_added,
|
||||
include_replaced=include_replaced,
|
||||
include_change_type_prefix=include_change_type_prefix)
|
||||
|
||||
# Recursively join lists
|
||||
f = lambda L: line_feed_sep.join([f(x) if type(x) is list else x for x in L])
|
||||
return f(rendered_diff)
|
||||
p = f(rendered_diff)
|
||||
return p
|
||||
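A small usage example for the reworked `render_diff()`: it now takes the two text blobs directly instead of file paths, and can emit either the prefixed line output or a unified patch. The sample inputs are invented, and the function is assumed to be in scope (imported from the diff module changed above).

```python
# Example call of the reworked diff renderer (invented inputs; render_diff() is assumed
# to be importable from the diff module shown above).
before = "price: 10\nin stock\n"
after = "price: 12\nin stock\n"

# Prefixed, line-oriented output, e.g. "(changed) price: 10" / "(into) price: 12"
print(render_diff(before, after, include_equal=False, line_feed_sep="\n"))

# Or a classic unified diff via difflib
print(render_diff(before, after, patch_format=True, line_feed_sep="\n"))
```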
|
||||
@@ -1,5 +1,6 @@
|
||||
import os
|
||||
import re
|
||||
from distutils.util import strtobool
|
||||
|
||||
from wtforms import (
|
||||
BooleanField,
|
||||
@@ -14,20 +15,28 @@ from wtforms import (
|
||||
validators,
|
||||
widgets
|
||||
)
|
||||
from flask_wtf.file import FileField, FileAllowed
|
||||
from wtforms.fields import FieldList
|
||||
|
||||
from wtforms.validators import ValidationError
|
||||
|
||||
from validators.url import url as url_validator
|
||||
|
||||
|
||||
# default
|
||||
# each select <option data-enabled="enabled-0-0"
|
||||
from changedetectionio.blueprint.browser_steps.browser_steps import browser_step_ui_config
|
||||
|
||||
from changedetectionio import content_fetcher
|
||||
from changedetectionio import content_fetcher, html_tools
|
||||
|
||||
from changedetectionio.notification import (
|
||||
valid_notification_formats,
|
||||
)
|
||||
|
||||
from wtforms.fields import FormField
|
||||
|
||||
dictfilt = lambda x, y: dict([ (i,x[i]) for i in x if i in set(y) ])
|
||||
|
||||
valid_method = {
|
||||
'GET',
|
||||
'POST',
|
||||
@@ -37,7 +46,7 @@ valid_method = {
|
||||
}
|
||||
|
||||
default_method = 'GET'
|
||||
|
||||
allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
|
||||
|
||||
class StringListField(StringField):
|
||||
widget = widgets.TextArea()
|
||||
@@ -90,6 +99,29 @@ class SaltyPasswordField(StringField):
|
||||
else:
|
||||
self.data = False
|
||||
|
||||
class StringTagUUID(StringField):
|
||||
|
||||
# process_formdata(self, valuelist) handled manually in POST handler
|
||||
|
||||
# Is what is shown when field <input> is rendered
|
||||
def _value(self):
|
||||
# Tag UUID to name, on submit it will convert it back (in the submit handler of init.py)
|
||||
if self.data and type(self.data) is list:
|
||||
tag_titles = []
|
||||
for i in self.data:
|
||||
tag = self.datastore.data['settings']['application']['tags'].get(i)
|
||||
if tag:
|
||||
tag_title = tag.get('title')
|
||||
if tag_title:
|
||||
tag_titles.append(tag_title)
|
||||
|
||||
return ', '.join(tag_titles)
|
||||
|
||||
if not self.data:
|
||||
return ''
|
||||
|
||||
return 'error'
|
||||
|
||||
class TimeBetweenCheckForm(Form):
|
||||
weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
@@ -138,7 +170,7 @@ class ValidateContentFetcherIsReady(object):
|
||||
from changedetectionio import content_fetcher
|
||||
|
||||
# Better would be a radiohandler that keeps a reference to each class
|
||||
if field.data is not None:
|
||||
if field.data is not None and field.data != 'system':
|
||||
klass = getattr(content_fetcher, field.data)
|
||||
some_object = klass()
|
||||
try:
|
||||
@@ -147,12 +179,12 @@ class ValidateContentFetcherIsReady(object):
|
||||
except urllib3.exceptions.MaxRetryError as e:
|
||||
driver_url = some_object.command_executor
|
||||
message = field.gettext('Content fetcher \'%s\' did not respond.' % (field.data))
|
||||
message += '<br/>' + field.gettext(
|
||||
message += '<br>' + field.gettext(
|
||||
'Be sure that the selenium/webdriver runner is running and accessible via network from this container/host.')
|
||||
message += '<br/>' + field.gettext('Did you follow the instructions in the wiki?')
|
||||
message += '<br/><br/>' + field.gettext('WebDriver Host: %s' % (driver_url))
|
||||
message += '<br/><a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">Go here for more information</a>'
|
||||
message += '<br/>'+field.gettext('Content fetcher did not respond properly, unable to use it.\n %s' % (str(e)))
|
||||
message += '<br>' + field.gettext('Did you follow the instructions in the wiki?')
|
||||
message += '<br><br>' + field.gettext('WebDriver Host: %s' % (driver_url))
|
||||
message += '<br><a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">Go here for more information</a>'
|
||||
message += '<br>'+field.gettext('Content fetcher did not respond properly, unable to use it.\n %s' % (str(e)))
|
||||
|
||||
raise ValidationError(message)
|
||||
|
||||
@@ -193,7 +225,7 @@ class ValidateAppRiseServers(object):
|
||||
message = field.gettext('\'%s\' is not a valid AppRise URL.' % (server_url))
|
||||
raise ValidationError(message)
|
||||
|
||||
class ValidateTokensList(object):
|
||||
class ValidateJinja2Template(object):
|
||||
"""
|
||||
Validates that a {token} is from a valid set
|
||||
"""
|
||||
@@ -202,14 +234,30 @@ class ValidateTokensList(object):
|
||||
|
||||
def __call__(self, form, field):
|
||||
from changedetectionio import notification
|
||||
regex = re.compile('{.*?}')
|
||||
for p in re.findall(regex, field.data):
|
||||
if not p.strip('{}') in notification.valid_tokens:
|
||||
message = field.gettext('Token \'%s\' is not a valid token.')
|
||||
raise ValidationError(message % (p))
|
||||
|
||||
|
||||
from jinja2 import Environment, BaseLoader, TemplateSyntaxError, UndefinedError
|
||||
from jinja2.meta import find_undeclared_variables
|
||||
|
||||
|
||||
try:
|
||||
jinja2_env = Environment(loader=BaseLoader)
|
||||
jinja2_env.globals.update(notification.valid_tokens)
|
||||
|
||||
rendered = jinja2_env.from_string(field.data).render()
|
||||
except TemplateSyntaxError as e:
|
||||
raise ValidationError(f"This is not a valid Jinja2 template: {e}") from e
|
||||
except UndefinedError as e:
|
||||
raise ValidationError(f"A variable or function is not defined: {e}") from e
|
||||
|
||||
ast = jinja2_env.parse(field.data)
|
||||
undefined = ", ".join(find_undeclared_variables(ast))
|
||||
if undefined:
|
||||
raise ValidationError(
|
||||
f"The following tokens used in the notification are not valid: {undefined}"
|
||||
)
|
||||
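In isolation, the validation approach above works roughly like this: render the template once to catch syntax and runtime errors, then parse it and ask Jinja2 which referenced names are not covered by the environment globals (the valid notification tokens). The token list below is a placeholder subset, not the project's full list.

```python
# Sketch of validating a notification template against a set of known tokens.
from jinja2 import Environment, BaseLoader, TemplateSyntaxError
from jinja2.meta import find_undeclared_variables

valid_tokens = {'watch_url': '', 'diff': ''}      # placeholder subset of the real token list
template_text = "{{ watch_url }} changed:\n{{ diff }} {{ not_a_token }}"

jinja2_env = Environment(loader=BaseLoader)
jinja2_env.globals.update(valid_tokens)

try:
    jinja2_env.from_string(template_text).render()
except TemplateSyntaxError as e:
    print(f"This is not a valid Jinja2 template: {e}")

ast = jinja2_env.parse(template_text)
undefined = ", ".join(find_undeclared_variables(ast))
if undefined:
    print(f"Tokens not valid: {undefined}")       # -> not_a_token
```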
|
||||
class validateURL(object):
|
||||
|
||||
|
||||
"""
|
||||
Flask wtform validators wont work with basic auth
|
||||
"""
|
||||
@@ -218,12 +266,23 @@ class validateURL(object):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
import validators
|
||||
try:
|
||||
validators.url(field.data.strip())
|
||||
except validators.ValidationFailure:
|
||||
message = field.gettext('\'%s\' is not a valid URL.' % (field.data.strip()))
|
||||
raise ValidationError(message)
|
||||
# This should raise a ValidationError() or not
|
||||
validate_url(field.data)
|
||||
|
||||
def validate_url(test_url):
|
||||
# If hosts that only contain alphanumerics are allowed ("localhost" for example)
|
||||
try:
|
||||
url_validator(test_url, simple_host=allow_simplehost)
|
||||
except validators.ValidationError:
|
||||
#@todo check for xss
|
||||
message = f"'{test_url}' is not a valid URL."
|
||||
# This should be wtforms.validators.
|
||||
raise ValidationError(message)
|
||||
|
||||
from .model.Watch import is_safe_url
|
||||
if not is_safe_url(test_url):
|
||||
# This should be wtforms.validators.
|
||||
raise ValidationError('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX or incorrect URL format')
|
||||
|
||||
class ValidateListRegex(object):
|
||||
"""
|
||||
@@ -235,11 +294,10 @@ class ValidateListRegex(object):
|
||||
def __call__(self, form, field):
|
||||
|
||||
for line in field.data:
|
||||
if line[0] == '/' and line[-1] == '/':
|
||||
# Because internally we don't wrap in /
|
||||
line = line.strip('/')
|
||||
if re.search(html_tools.PERL_STYLE_REGEX, line, re.IGNORECASE):
|
||||
try:
|
||||
re.compile(line)
|
||||
regex = html_tools.perl_style_slash_enclosed_regex_to_options(line)
|
||||
re.compile(regex)
|
||||
except re.error:
|
||||
message = field.gettext('RegEx \'%s\' is not a valid regular expression.')
|
||||
raise ValidationError(message % (line))
|
||||
@@ -325,22 +383,33 @@ class ValidateCSSJSONXPATHInput(object):
|
||||
raise ValidationError("A system-error occurred when validating your jq expression")
|
||||
|
||||
class quickWatchForm(Form):
|
||||
from . import processors
|
||||
|
||||
url = fields.URLField('URL', validators=[validateURL()])
|
||||
tag = StringField('Group tag', [validators.Optional()])
|
||||
tags = StringTagUUID('Group tag', [validators.Optional()])
|
||||
watch_submit_button = SubmitField('Watch', render_kw={"class": "pure-button pure-button-primary"})
|
||||
processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff")
|
||||
edit_and_watch_submit_button = SubmitField('Edit > Watch', render_kw={"class": "pure-button pure-button-primary"})
|
||||
|
||||
|
||||
# Common to a single watch and the global settings
|
||||
class commonSettingsForm(Form):
|
||||
notification_urls = StringListField('Notification URL list', validators=[validators.Optional(), ValidateAppRiseServers()])
|
||||
notification_title = StringField('Notification title', validators=[validators.Optional(), ValidateTokensList()])
|
||||
notification_body = TextAreaField('Notification body', validators=[validators.Optional(), ValidateTokensList()])
|
||||
|
||||
notification_urls = StringListField('Notification URL List', validators=[validators.Optional(), ValidateAppRiseServers()])
|
||||
notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()])
|
||||
notification_body = TextAreaField('Notification Body', default='{{ watch_url }} had a change.', validators=[validators.Optional(), ValidateJinja2Template()])
|
||||
notification_format = SelectField('Notification format', choices=valid_notification_formats.keys())
|
||||
fetch_backend = RadioField(u'Fetch method', choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
|
||||
fetch_backend = RadioField(u'Fetch Method', choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
|
||||
extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False)
|
||||
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1,
|
||||
message="Should contain one or more seconds")])
|
||||
class importForm(Form):
|
||||
from . import processors
|
||||
processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff")
|
||||
urls = TextAreaField('URLs')
|
||||
xlsx_file = FileField('Upload .xlsx file', validators=[FileAllowed(['xlsx'], 'Must be .xlsx file!')])
|
||||
file_mapping = SelectField('File mapping', [validators.DataRequired()], choices={('wachete', 'Wachete mapping'), ('custom','Custom mapping')})
|
||||
|
||||
|
||||
class SingleBrowserStep(Form):
|
||||
|
||||
@@ -356,7 +425,7 @@ class SingleBrowserStep(Form):
|
||||
class watchForm(commonSettingsForm):
|
||||
|
||||
url = fields.URLField('URL', validators=[validateURL()])
|
||||
tag = StringField('Group tag', [validators.Optional()], default='')
|
||||
tags = StringTagUUID('Group tag', [validators.Optional()], default='')
|
||||
|
||||
time_between_check = FormField(TimeBetweenCheckForm)
|
||||
|
||||
@@ -373,11 +442,19 @@ class watchForm(commonSettingsForm):
|
||||
body = TextAreaField('Request body', [validators.Optional()])
|
||||
method = SelectField('Request method', choices=valid_method, default=default_method)
|
||||
ignore_status_codes = BooleanField('Ignore status codes (process non-2xx status codes as normal)', default=False)
|
||||
check_unique_lines = BooleanField('Only trigger when new lines appear', default=False)
|
||||
check_unique_lines = BooleanField('Only trigger when unique lines appear', default=False)
|
||||
|
||||
filter_text_added = BooleanField('Added lines', default=True)
|
||||
filter_text_replaced = BooleanField('Replaced/changed lines', default=True)
|
||||
filter_text_removed = BooleanField('Removed lines', default=True)
|
||||
|
||||
# @todo this class could be moved to its own text_json_diff_watchForm and this goes to restock_diff_Watchform perhaps
|
||||
in_stock_only = BooleanField('Only trigger when product goes BACK to in-stock', default=True)
|
||||
|
||||
trigger_text = StringListField('Trigger/wait for text', [validators.Optional(), ValidateListRegex()])
|
||||
if os.getenv("PLAYWRIGHT_DRIVER_URL"):
|
||||
browser_steps = FieldList(FormField(SingleBrowserStep), min_entries=10)
|
||||
text_should_not_be_present = StringListField('Block change-detection if text matches', [validators.Optional(), ValidateListRegex()])
|
||||
text_should_not_be_present = StringListField('Block change-detection while text matches', [validators.Optional(), ValidateListRegex()])
|
||||
webdriver_js_execute_code = TextAreaField('Execute JavaScript before change detection', render_kw={"rows": "5"}, validators=[validators.Optional()])
|
||||
|
||||
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
|
||||
@@ -400,9 +477,25 @@ class watchForm(commonSettingsForm):
|
||||
self.body.errors.append('Body must be empty when Request Method is set to GET')
|
||||
result = False
|
||||
|
||||
# Attempt to validate jinja2 templates in the URL
|
||||
from jinja2 import Environment
|
||||
# Jinja2 available in URLs along with https://pypi.org/project/jinja2-time/
|
||||
jinja2_env = Environment(extensions=['jinja2_time.TimeExtension'])
|
||||
try:
|
||||
ready_url = str(jinja2_env.from_string(self.url.data).render())
|
||||
except Exception as e:
|
||||
self.url.errors.append('Invalid template syntax')
|
||||
result = False
|
||||
return result
|
||||
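To illustrate the URL templating that the new validation step above checks: with the `jinja2_time` extension loaded (assuming the jinja2-time package is installed), a watch URL may embed a `{% now %}` tag that gets rendered just before fetching. The URL below is hypothetical.

```python
# Sketch: rendering a Jinja2 template embedded in a watch URL (hypothetical URL).
from jinja2 import Environment

jinja2_env = Environment(extensions=['jinja2_time.TimeExtension'])

url_template = "https://example.com/report-{% now 'utc', '%Y-%m-%d' %}.html"
ready_url = str(jinja2_env.from_string(url_template).render())

print(ready_url)   # e.g. https://example.com/report-2023-01-31.html
```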
|
||||
|
||||
class SingleExtraProxy(Form):
|
||||
|
||||
# maybe better to set some <script>var..
|
||||
proxy_name = StringField('Name', [validators.Optional()], render_kw={"placeholder": "Name"})
|
||||
proxy_url = StringField('Proxy URL', [validators.Optional()], render_kw={"placeholder": "socks5:// or regular proxy http://user:pass@...:3128", "size":50})
|
||||
# @todo do the validation here instead
|
||||
|
||||
# datastore.data['settings']['requests']..
|
||||
class globalSettingsRequestForm(Form):
|
||||
time_between_check = FormField(TimeBetweenCheckForm)
|
||||
@@ -410,21 +503,37 @@ class globalSettingsRequestForm(Form):
|
||||
jitter_seconds = IntegerField('Random jitter seconds ± check',
|
||||
render_kw={"style": "width: 5em;"},
|
||||
validators=[validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
extra_proxies = FieldList(FormField(SingleExtraProxy), min_entries=5)
|
||||
|
||||
def validate_extra_proxies(self, extra_validators=None):
|
||||
for e in self.data['extra_proxies']:
|
||||
if e.get('proxy_name') or e.get('proxy_url'):
|
||||
if not e.get('proxy_name','').strip() or not e.get('proxy_url','').strip():
|
||||
self.extra_proxies.errors.append('Both a name, and a Proxy URL is required.')
|
||||
return False
|
||||
|
||||
|
||||
# datastore.data['settings']['application']..
|
||||
class globalSettingsApplicationForm(commonSettingsForm):
|
||||
|
||||
base_url = StringField('Base URL', validators=[validators.Optional()])
|
||||
global_subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)])
|
||||
global_ignore_text = StringListField('Ignore Text', [ValidateListRegex()])
|
||||
ignore_whitespace = BooleanField('Ignore whitespace')
|
||||
removepassword_button = SubmitField('Remove password', render_kw={"class": "pure-button pure-button-primary"})
|
||||
empty_pages_are_a_change = BooleanField('Treat empty pages as a change?', default=False)
|
||||
render_anchor_tag_content = BooleanField('Render anchor tag content', default=False)
|
||||
fetch_backend = RadioField('Fetch Method', default="html_requests", choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
|
||||
api_access_token_enabled = BooleanField('API access token security check enabled', default=True, validators=[validators.Optional()])
|
||||
base_url = StringField('Notification base URL override',
|
||||
validators=[validators.Optional()],
|
||||
render_kw={"placeholder": os.getenv('BASE_URL', 'Not set')}
|
||||
)
|
||||
empty_pages_are_a_change = BooleanField('Treat empty pages as a change?', default=False)
|
||||
fetch_backend = RadioField('Fetch Method', default="html_requests", choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
|
||||
global_ignore_text = StringListField('Ignore Text', [ValidateListRegex()])
|
||||
global_subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)])
|
||||
ignore_whitespace = BooleanField('Ignore whitespace')
|
||||
password = SaltyPasswordField()
|
||||
|
||||
pager_size = IntegerField('Pager size',
|
||||
render_kw={"style": "width: 5em;"},
|
||||
validators=[validators.NumberRange(min=0,
|
||||
message="Should be at least zero (disabled)")])
|
||||
removepassword_button = SubmitField('Remove password', render_kw={"class": "pure-button pure-button-primary"})
|
||||
render_anchor_tag_content = BooleanField('Render anchor tag content', default=False)
|
||||
shared_diff_access = BooleanField('Allow access to view diff page when password is enabled', default=False, validators=[validators.Optional()])
|
||||
filter_failure_notification_threshold_attempts = IntegerField('Number of times the filter can be missing before sending a notification',
|
||||
render_kw={"style": "width: 5em;"},
|
||||
validators=[validators.NumberRange(min=0,
|
||||
@@ -439,3 +548,8 @@ class globalSettingsForm(Form):
|
||||
requests = FormField(globalSettingsRequestForm)
|
||||
application = FormField(globalSettingsApplicationForm)
|
||||
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
|
||||
|
||||
|
||||
class extractDataForm(Form):
|
||||
extract_regex = StringField('RegEx to extract', validators=[validators.Length(min=1, message="Needs a RegEx")])
|
||||
extract_submit_button = SubmitField('Extract as CSV', render_kw={"class": "pure-button pure-button-primary"})
|
||||
|
||||
@@ -1,19 +1,44 @@
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from inscriptis import get_text
|
||||
from inscriptis.model.config import ParserConfig
|
||||
from jsonpath_ng.ext import parse
|
||||
from typing import List
|
||||
from inscriptis.css_profiles import CSS_PROFILES, HtmlElement
|
||||
from inscriptis.html_properties import Display
|
||||
from inscriptis.model.config import ParserConfig
|
||||
from xml.sax.saxutils import escape as xml_escape
|
||||
import json
|
||||
import re
|
||||
|
||||
|
||||
# HTML added to be sure each result matching a filter (.example) gets converted to a new line by Inscriptis
|
||||
TEXT_FILTER_LIST_LINE_SUFFIX = "<br/>"
|
||||
TEXT_FILTER_LIST_LINE_SUFFIX = "<br>"
|
||||
|
||||
PERL_STYLE_REGEX = r'^/(.*?)/([a-z]*)?$'
|
||||
# 'price' , 'lowPrice', 'highPrice' are usually under here
|
||||
# All of those may or may not appear on different websites - I didn't find a way to do case-insensitive searching here
|
||||
LD_JSON_PRODUCT_OFFER_SELECTORS = ["json:$..offers", "json:$..Offers"]
|
||||
|
||||
class JSONNotFound(ValueError):
|
||||
def __init__(self, msg):
|
||||
ValueError.__init__(self, msg)
|
||||
|
||||
|
||||
|
||||
# Doesn't look like Python supports forward-slash auto-enclosure in re.findall
|
||||
# So convert it to inline flag "(?i)foobar" type configuration
|
||||
def perl_style_slash_enclosed_regex_to_options(regex):
|
||||
|
||||
res = re.search(PERL_STYLE_REGEX, regex, re.IGNORECASE)
|
||||
|
||||
if res:
|
||||
flags = res.group(2) if res.group(2) else 'i'
|
||||
regex = f"(?{flags}){res.group(1)}"
|
||||
else:
|
||||
# Fall back to just ignorecase as an option
|
||||
regex = f"(?i){regex}"
|
||||
|
||||
return regex
|
||||
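A minimal illustrative sketch (not part of this diff) of what the conversion above produces, assuming the helper is importable from changedetectionio.html_tools:

import re
from changedetectionio.html_tools import perl_style_slash_enclosed_regex_to_options

print(perl_style_slash_enclosed_regex_to_options('/unit price/i'))   # "(?i)unit price"
print(perl_style_slash_enclosed_regex_to_options('/Sold.?Out/im'))   # "(?im)Sold.?Out"
print(perl_style_slash_enclosed_regex_to_options('plain text'))      # "(?i)plain text" (fallback)
assert re.search(perl_style_slash_enclosed_regex_to_options('/sold out/'), 'SOLD OUT today')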
|
||||
# Given a CSS Rule, and a blob of HTML, return the blob of HTML that matches
|
||||
def include_filters(include_filters, html_content, append_pretty_line_formatting=False):
|
||||
soup = BeautifulSoup(html_content, "html.parser")
|
||||
@@ -46,10 +71,15 @@ def element_removal(selectors: List[str], html_content):
|
||||
|
||||
|
||||
# Return str Utf-8 of matched rules
|
||||
def xpath_filter(xpath_filter, html_content, append_pretty_line_formatting=False):
|
||||
def xpath_filter(xpath_filter, html_content, append_pretty_line_formatting=False, is_rss=False):
|
||||
from lxml import etree, html
|
||||
|
||||
tree = html.fromstring(bytes(html_content, encoding='utf-8'))
|
||||
parser = None
|
||||
if is_rss:
|
||||
# So that we can keep CDATA for cdata_in_document_to_text() to process
|
||||
parser = etree.XMLParser(strip_cdata=False)
|
||||
|
||||
tree = html.fromstring(bytes(html_content, encoding='utf-8'), parser=parser)
|
||||
html_block = ""
|
||||
|
||||
r = tree.xpath(xpath_filter.strip(), namespaces={'re': 'http://exslt.org/regular-expressions'})
|
||||
@@ -72,7 +102,6 @@ def xpath_filter(xpath_filter, html_content, append_pretty_line_formatting=False
|
||||
|
||||
return html_block
|
||||
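A hedged usage sketch for the new is_rss flag, assuming the xpath_filter() signature shown in this hunk; the CDATA section is only kept intact when is_rss=True so cdata_in_document_to_text() can still process it later:

from changedetectionio.html_tools import xpath_filter

rss = ("<rss><channel><item>"
       "<title>0.45.6 released</title>"
       "<description><![CDATA[<b>Bug fixes</b> and more]]></description>"
       "</item></channel></rss>")

print(xpath_filter('//item/title', rss))                      # default HTML parse
print(xpath_filter('//item/description', rss, is_rss=True))   # XMLParser(strip_cdata=False) keeps the CDATA block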
|
||||
|
||||
# Extract/find element
|
||||
def extract_element(find='title', html_content=''):
|
||||
|
||||
@@ -127,37 +156,64 @@ def _get_stripped_text_from_json_match(match):
|
||||
|
||||
return stripped_text_from_html
|
||||
|
||||
def extract_json_as_string(content, json_filter):
|
||||
|
||||
# content - json
|
||||
# json_filter - ie json:$..price
|
||||
# ensure_is_ldjson_info_type - str "product", optional, "@type == product" (I don't know how to do that as a JSON selector)
|
||||
def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None):
|
||||
stripped_text_from_html = False
|
||||
|
||||
# Try to parse/filter out the JSON, if we get some parser error, then maybe it's embedded <script type=ldjson>
|
||||
# Try to parse/filter out the JSON, if we get some parser error, then maybe it's embedded within HTML tags
|
||||
try:
|
||||
stripped_text_from_html = _parse_json(json.loads(content), json_filter)
|
||||
except json.JSONDecodeError:
|
||||
|
||||
# Foreach <script json></script> blob.. just return the first that matches json_filter
|
||||
s = []
|
||||
# As a last resort, try to parse the whole <body>
|
||||
soup = BeautifulSoup(content, 'html.parser')
|
||||
bs_result = soup.findAll('script')
|
||||
|
||||
if not bs_result:
|
||||
raise JSONNotFound("No parsable JSON found in this document")
|
||||
if ensure_is_ldjson_info_type:
|
||||
bs_result = soup.findAll('script', {"type": "application/ld+json"})
|
||||
else:
|
||||
bs_result = soup.findAll('script')
|
||||
bs_result += soup.findAll('body')
|
||||
|
||||
bs_jsons = []
|
||||
for result in bs_result:
|
||||
# Skip empty tags, and things that don't even look like JSON
|
||||
if not result.string or not '{' in result.string:
|
||||
if not result.text or '{' not in result.text:
|
||||
continue
|
||||
|
||||
try:
|
||||
json_data = json.loads(result.string)
|
||||
json_data = json.loads(result.text)
|
||||
bs_jsons.append(json_data)
|
||||
except json.JSONDecodeError:
|
||||
# Just skip it
|
||||
# Skip objects which cannot be parsed
|
||||
continue
|
||||
else:
|
||||
stripped_text_from_html = _parse_json(json_data, json_filter)
|
||||
if stripped_text_from_html:
|
||||
break
|
||||
|
||||
if not bs_jsons:
|
||||
raise JSONNotFound("No parsable JSON found in this document")
|
||||
|
||||
for json_data in bs_jsons:
|
||||
stripped_text_from_html = _parse_json(json_data, json_filter)
|
||||
|
||||
if ensure_is_ldjson_info_type:
|
||||
# Could sometimes be list, string or something else random
|
||||
if isinstance(json_data, dict):
|
||||
# If it has LD JSON 'key' @type, and @type is 'product', and something was found for the search
|
||||
# (Some sites have multiple of the same ld+json @type='product', but some have the review part, some have the 'price' part)
|
||||
# @type could also be a list (Product, SubType)
|
||||
# LD_JSON auto-extract also requires some content PLUS the ldjson to be present
|
||||
# 1833 - could be either str or dict, should not be anything else
|
||||
if json_data.get('@type') and stripped_text_from_html:
|
||||
try:
|
||||
if isinstance(json_data.get('@type'), (str, list)):
|
||||
types = [json_data.get('@type')] if isinstance(json_data.get('@type'), str) else json_data.get('@type')
|
||||
if ensure_is_ldjson_info_type.lower() in [x.lower().strip() for x in types]:
|
||||
break
|
||||
except:
|
||||
continue
|
||||
|
||||
elif stripped_text_from_html:
|
||||
break
|
||||
|
||||
if not stripped_text_from_html:
|
||||
# Re 265 - Just return an empty string when filter not found
|
||||
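A sketch of the new ensure_is_ldjson_info_type argument in use (import assumed to be changedetectionio.html_tools, as later hunks in this diff suggest), mirroring how LD_JSON_PRODUCT_OFFER_SELECTORS is applied further down:

from changedetectionio.html_tools import extract_json_as_string

page = '''<html><head><script type="application/ld+json">
{"@context": "https://schema.org", "@type": "Product", "name": "Widget",
 "offers": {"@type": "Offer", "price": "19.99", "priceCurrency": "EUR"}}
</script></head><body>Widget product page</body></html>'''

# Only <script type="application/ld+json"> blocks whose @type matches "product" are considered
print(extract_json_as_string(page, "json:$..offers", ensure_is_ldjson_info_type="product"))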
@@ -170,50 +226,56 @@ def extract_json_as_string(content, json_filter):
|
||||
#
|
||||
# wordlist - list of regex's (str) or words (str)
|
||||
def strip_ignore_text(content, wordlist, mode="content"):
|
||||
ignore = []
|
||||
ignore_regex = []
|
||||
|
||||
# @todo check this runs case insensitive
|
||||
for k in wordlist:
|
||||
|
||||
# Is it a regex?
|
||||
if k[0] == '/':
|
||||
ignore_regex.append(k.strip(" /"))
|
||||
else:
|
||||
ignore.append(k)
|
||||
|
||||
i = 0
|
||||
output = []
|
||||
ignore_text = []
|
||||
ignore_regex = []
|
||||
ignored_line_numbers = []
|
||||
|
||||
for k in wordlist:
|
||||
# Is it a regex?
|
||||
res = re.search(PERL_STYLE_REGEX, k, re.IGNORECASE)
|
||||
if res:
|
||||
ignore_regex.append(re.compile(perl_style_slash_enclosed_regex_to_options(k)))
|
||||
else:
|
||||
ignore_text.append(k.strip())
|
||||
|
||||
for line in content.splitlines():
|
||||
i += 1
|
||||
# Always ignore blank lines in this mode. (when this function gets called)
|
||||
got_match = False
|
||||
if len(line.strip()):
|
||||
regex_matches = False
|
||||
for l in ignore_text:
|
||||
if l.lower() in line.lower():
|
||||
got_match = True
|
||||
|
||||
# if any of these match, skip
|
||||
for regex in ignore_regex:
|
||||
try:
|
||||
if re.search(regex, line, re.IGNORECASE):
|
||||
regex_matches = True
|
||||
except Exception as e:
|
||||
continue
|
||||
if not got_match:
|
||||
for r in ignore_regex:
|
||||
if r.search(line):
|
||||
got_match = True
|
||||
|
||||
if not regex_matches and not any(skip_text.lower() in line.lower() for skip_text in ignore):
|
||||
if not got_match:
|
||||
# Not ignored
|
||||
output.append(line.encode('utf8'))
|
||||
else:
|
||||
ignored_line_numbers.append(i)
|
||||
|
||||
|
||||
|
||||
# Used for finding out what to highlight
|
||||
if mode == "line numbers":
|
||||
return ignored_line_numbers
|
||||
|
||||
return "\n".encode('utf8').join(output)
|
||||
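A small sketch of strip_ignore_text() behaviour: plain words match case-insensitively, /slash/ entries are compiled via perl_style_slash_enclosed_regex_to_options(), and mode="line numbers" returns which lines were ignored:

from changedetectionio.html_tools import strip_ignore_text

content = "Total: 12 items\nLast updated 2023-08-01\nSponsored link"
wordlist = ["sponsored", r"/last updated \d{4}/i"]

print(strip_ignore_text(content, wordlist))                       # b'Total: 12 items'
print(strip_ignore_text(content, wordlist, mode="line numbers"))  # [2, 3]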
|
||||
def cdata_in_document_to_text(html_content: str, render_anchor_tag_content=False) -> str:
|
||||
pattern = r'<!\[CDATA\[(\s*(?:.(?<!\]\]>)\s*)*)\]\]>'
|
||||
def repl(m):
|
||||
text = m.group(1)
|
||||
return xml_escape(html_to_text(html_content=text)).strip()
|
||||
|
||||
def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:
|
||||
return re.sub(pattern, repl, html_content)
|
||||
|
||||
def html_to_text(html_content: str, render_anchor_tag_content=False, is_rss=False) -> str:
|
||||
"""Converts html string to a string with just the text. If ignoring
|
||||
rendering anchor tag content is enable, anchor tag content are also
|
||||
included in the text
|
||||
@@ -229,20 +291,46 @@ def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:
|
||||
# if anchor tag content flag is set to True define a config for
|
||||
# extracting this content
|
||||
if render_anchor_tag_content:
|
||||
|
||||
parser_config = ParserConfig(
|
||||
annotation_rules={"a": ["hyperlink"]}, display_links=True
|
||||
annotation_rules={"a": ["hyperlink"]},
|
||||
display_links=True
|
||||
)
|
||||
|
||||
# otherwise set config to None
|
||||
# otherwise set config to None/default
|
||||
else:
|
||||
parser_config = None
|
||||
|
||||
# get text and annotations via inscriptis
|
||||
# RSS Mode - Inscriptis will treat `title` as something else.
|
||||
# Make it as a regular block display element (//item/title)
|
||||
# This is a bit of a hack - the real way it to use XSLT to convert it to HTML #1874
|
||||
if is_rss:
|
||||
html_content = re.sub(r'<title([\s>])', r'<h1\1', html_content)
|
||||
html_content = re.sub(r'</title>', r'</h1>', html_content)
|
||||
|
||||
text_content = get_text(html_content, config=parser_config)
|
||||
|
||||
return text_content
|
||||
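An illustrative sketch of the two flags on html_to_text(): render_anchor_tag_content keeps hyperlink targets in the output, and is_rss rewrites <title> to <h1> so Inscriptis treats it as a block element:

from changedetectionio.html_tools import html_to_text

snippet = '<p>Read the <a href="https://example.com/notes">release notes</a></p>'
print(html_to_text(snippet))
print(html_to_text(snippet, render_anchor_tag_content=True))

rss_item = '<item><title>0.45.6 released</title><description>Bug fixes</description></item>'
print(html_to_text(rss_item, is_rss=True))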
|
||||
|
||||
# Does LD+JSON exist with a @type=='product' and a .price set anywhere?
|
||||
def has_ldjson_product_info(content):
|
||||
pricing_data = ''
|
||||
|
||||
try:
|
||||
if not 'application/ld+json' in content:
|
||||
return False
|
||||
|
||||
for filter in LD_JSON_PRODUCT_OFFER_SELECTORS:
|
||||
pricing_data += extract_json_as_string(content=content,
|
||||
json_filter=filter,
|
||||
ensure_is_ldjson_info_type="product")
|
||||
|
||||
except Exception as e:
|
||||
# Totally fine
|
||||
return False
|
||||
x=bool(pricing_data)
|
||||
return x
|
||||
|
||||
|
||||
def workarounds_for_obfuscations(content):
|
||||
"""
|
||||
Some sites are using sneaky tactics to make prices and other information un-renderable by Inscriptis
|
||||
@@ -257,3 +345,18 @@ def workarounds_for_obfuscations(content):
|
||||
content = re.sub('<!--\s+-->', '', content)
|
||||
|
||||
return content
|
||||
|
||||
|
||||
def get_triggered_text(content, trigger_text):
|
||||
triggered_text = []
|
||||
result = strip_ignore_text(content=content,
|
||||
wordlist=trigger_text,
|
||||
mode="line numbers")
|
||||
|
||||
i = 1
|
||||
for p in content.splitlines():
|
||||
if i in result:
|
||||
triggered_text.append(p)
|
||||
i += 1
|
||||
|
||||
return triggered_text
|
||||
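A short sketch of get_triggered_text(), which re-uses strip_ignore_text() in "line numbers" mode and returns the matching lines themselves:

from changedetectionio.html_tools import get_triggered_text

content = "In stock\nPrice: 19.99 EUR\nShips tomorrow"
print(get_triggered_text(content, trigger_text=[r"/price: [\d.]+/i"]))   # ['Price: 19.99 EUR']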
|
||||
@@ -1,6 +1,9 @@
|
||||
from abc import ABC, abstractmethod
|
||||
import time
|
||||
import validators
|
||||
from wtforms import ValidationError
|
||||
|
||||
from changedetectionio.forms import validate_url
|
||||
|
||||
|
||||
class Importer():
|
||||
@@ -12,6 +15,7 @@ class Importer():
|
||||
self.new_uuids = []
|
||||
self.good = 0
|
||||
self.remaining_data = []
|
||||
self.import_profile = None
|
||||
|
||||
@abstractmethod
|
||||
def run(self,
|
||||
@@ -29,6 +33,7 @@ class import_url_list(Importer):
|
||||
data,
|
||||
flash,
|
||||
datastore,
|
||||
processor=None
|
||||
):
|
||||
|
||||
urls = data.split("\n")
|
||||
@@ -51,8 +56,13 @@ class import_url_list(Importer):
|
||||
|
||||
# Flask WTForms validators won't work with basic auth, use the validators package
|
||||
# Up to 5000 per batch so we don't flood the server
|
||||
if len(url) and validators.url(url.replace('source:', '')) and good < 5000:
|
||||
new_uuid = datastore.add_watch(url=url.strip(), tag=tags, write_to_disk_now=False)
|
||||
# @todo validators.url failed on local hostnames (such as referring to ourself when using browserless)
|
||||
if len(url) and 'http' in url.lower() and good < 5000:
|
||||
extras = None
|
||||
if processor:
|
||||
extras = {'processor': processor}
|
||||
new_uuid = datastore.add_watch(url=url.strip(), tag=tags, write_to_disk_now=False, extras=extras)
|
||||
|
||||
if new_uuid:
|
||||
# Straight into the queue.
|
||||
self.new_uuids.append(new_uuid)
|
||||
@@ -79,7 +89,8 @@ class import_distill_io_json(Importer):
|
||||
now = time.time()
|
||||
self.new_uuids=[]
|
||||
|
||||
|
||||
# @todo Use JSONSchema like in the API to validate here.
|
||||
|
||||
try:
|
||||
data = json.loads(data.strip())
|
||||
except json.decoder.JSONDecodeError:
|
||||
@@ -114,11 +125,8 @@ class import_distill_io_json(Importer):
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
|
||||
if d.get('tags', False):
|
||||
extras['tag'] = ", ".join(d['tags'])
|
||||
|
||||
new_uuid = datastore.add_watch(url=d['uri'].strip(),
|
||||
tag=",".join(d.get('tags', [])),
|
||||
extras=extras,
|
||||
write_to_disk_now=False)
|
||||
|
||||
@@ -128,3 +136,167 @@ class import_distill_io_json(Importer):
|
||||
good += 1
|
||||
|
||||
flash("{} Imported from Distill.io in {:.2f}s, {} Skipped.".format(len(self.new_uuids), time.time() - now, len(self.remaining_data)))
|
||||
|
||||
|
||||
class import_xlsx_wachete(Importer):
|
||||
|
||||
def run(self,
|
||||
data,
|
||||
flash,
|
||||
datastore,
|
||||
):
|
||||
|
||||
good = 0
|
||||
now = time.time()
|
||||
self.new_uuids = []
|
||||
|
||||
from openpyxl import load_workbook
|
||||
|
||||
try:
|
||||
wb = load_workbook(data)
|
||||
except Exception as e:
|
||||
# @todo correct except
|
||||
flash("Unable to read export XLSX file, something wrong with the file?", 'error')
|
||||
return
|
||||
|
||||
row_id = 2
|
||||
for row in wb.active.iter_rows(min_row=row_id):
|
||||
try:
|
||||
extras = {}
|
||||
data = {}
|
||||
for cell in row:
|
||||
if not cell.value:
|
||||
continue
|
||||
column_title = wb.active.cell(row=1, column=cell.column).value.strip().lower()
|
||||
data[column_title] = cell.value
|
||||
|
||||
# Forced switch to webdriver/playwright/etc
|
||||
dynamic_wachet = str(data.get('dynamic wachet', '')).strip().lower() # Convert bool to str to cover all cases
|
||||
# libreoffice and others can have it as =FALSE() =TRUE(), or bool(true)
|
||||
if 'true' in dynamic_wachet or dynamic_wachet == '1':
|
||||
extras['fetch_backend'] = 'html_webdriver'
|
||||
elif 'false' in dynamic_wachet or dynamic_wachet == '0':
|
||||
extras['fetch_backend'] = 'html_requests'
|
||||
|
||||
if data.get('xpath'):
|
||||
# @todo split by || ?
|
||||
extras['include_filters'] = [data.get('xpath')]
|
||||
if data.get('name'):
|
||||
extras['title'] = data.get('name').strip()
|
||||
if data.get('interval (min)'):
|
||||
minutes = int(data.get('interval (min)'))
|
||||
hours, minutes = divmod(minutes, 60)
|
||||
days, hours = divmod(hours, 24)
|
||||
weeks, days = divmod(days, 7)
|
||||
extras['time_between_check'] = {'weeks': weeks, 'days': days, 'hours': hours, 'minutes': minutes, 'seconds': 0}
|
||||
|
||||
# At minimum a URL is required.
|
||||
if data.get('url'):
|
||||
try:
|
||||
validate_url(data.get('url'))
|
||||
except ValidationError as e:
|
||||
print(">> import URL error", data.get('url'), str(e))
|
||||
flash(f"Error processing row number {row_id}, URL value was incorrect, row was skipped.", 'error')
|
||||
# Don't bother processing anything else on this row
|
||||
continue
|
||||
|
||||
new_uuid = datastore.add_watch(url=data['url'].strip(),
|
||||
extras=extras,
|
||||
tag=data.get('folder'),
|
||||
write_to_disk_now=False)
|
||||
if new_uuid:
|
||||
# Straight into the queue.
|
||||
self.new_uuids.append(new_uuid)
|
||||
good += 1
|
||||
except Exception as e:
|
||||
print(e)
|
||||
flash(f"Error processing row number {row_id}, check all cell data types are correct, row was skipped.", 'error')
|
||||
else:
|
||||
row_id += 1
|
||||
|
||||
flash(
|
||||
"{} imported from Wachete .xlsx in {:.2f}s".format(len(self.new_uuids), time.time() - now))
|
||||
|
||||
|
||||
class import_xlsx_custom(Importer):
|
||||
|
||||
def run(self,
|
||||
data,
|
||||
flash,
|
||||
datastore,
|
||||
):
|
||||
|
||||
good = 0
|
||||
now = time.time()
|
||||
self.new_uuids = []
|
||||
|
||||
from openpyxl import load_workbook
|
||||
|
||||
try:
|
||||
wb = load_workbook(data)
|
||||
except Exception as e:
|
||||
# @todo correct except
|
||||
flash("Unable to read export XLSX file, something wrong with the file?", 'error')
|
||||
return
|
||||
|
||||
# @todo check at least 2 rows, same in other method
|
||||
from .forms import validate_url
|
||||
row_i = 1
|
||||
|
||||
try:
|
||||
for row in wb.active.iter_rows():
|
||||
url = None
|
||||
tags = None
|
||||
extras = {}
|
||||
|
||||
for cell in row:
|
||||
if not self.import_profile.get(cell.col_idx):
|
||||
continue
|
||||
if not cell.value:
|
||||
continue
|
||||
|
||||
cell_map = self.import_profile.get(cell.col_idx)
|
||||
|
||||
cell_val = str(cell.value).strip() # could be bool
|
||||
|
||||
if cell_map == 'url':
|
||||
url = cell.value.strip()
|
||||
try:
|
||||
validate_url(url)
|
||||
except ValidationError as e:
|
||||
print(">> Import URL error", url, str(e))
|
||||
flash(f"Error processing row number {row_i}, URL value was incorrect, row was skipped.", 'error')
|
||||
# Don't bother processing anything else on this row
|
||||
url = None
|
||||
break
|
||||
elif cell_map == 'tag':
|
||||
tags = cell.value.strip()
|
||||
elif cell_map == 'include_filters':
|
||||
# @todo validate?
|
||||
extras['include_filters'] = [cell.value.strip()]
|
||||
elif cell_map == 'interval_minutes':
|
||||
hours, minutes = divmod(int(cell_val), 60)
|
||||
days, hours = divmod(hours, 24)
|
||||
weeks, days = divmod(days, 7)
|
||||
extras['time_between_check'] = {'weeks': weeks, 'days': days, 'hours': hours, 'minutes': minutes, 'seconds': 0}
|
||||
else:
|
||||
extras[cell_map] = cell_val
|
||||
|
||||
# At minimum a URL is required.
|
||||
if url:
|
||||
new_uuid = datastore.add_watch(url=url,
|
||||
extras=extras,
|
||||
tag=tags,
|
||||
write_to_disk_now=False)
|
||||
if new_uuid:
|
||||
# Straight into the queue.
|
||||
self.new_uuids.append(new_uuid)
|
||||
good += 1
|
||||
except Exception as e:
|
||||
print(e)
|
||||
flash(f"Error processing row number {row_i}, check all cell data types are correct, row was skipped.", 'error')
|
||||
else:
|
||||
row_i += 1
|
||||
|
||||
flash(
|
||||
"{} imported from custom .xlsx in {:.2f}s".format(len(self.new_uuids), time.time() - now))
|
||||
|
||||
@@ -15,31 +15,35 @@ class model(dict):
|
||||
'headers': {
|
||||
},
|
||||
'requests': {
|
||||
'timeout': int(getenv("DEFAULT_SETTINGS_REQUESTS_TIMEOUT", "45")), # Default 45 seconds
|
||||
'time_between_check': {'weeks': None, 'days': None, 'hours': 3, 'minutes': None, 'seconds': None},
|
||||
'extra_proxies': [], # Configurable extra proxies via the UI
|
||||
'jitter_seconds': 0,
|
||||
'proxy': None, # Preferred proxy connection
|
||||
'time_between_check': {'weeks': None, 'days': None, 'hours': 3, 'minutes': None, 'seconds': None},
|
||||
'timeout': int(getenv("DEFAULT_SETTINGS_REQUESTS_TIMEOUT", "45")), # Default 45 seconds
|
||||
'workers': int(getenv("DEFAULT_SETTINGS_REQUESTS_WORKERS", "10")), # Number of threads, lower is better for slow connections
|
||||
'proxy': None # Preferred proxy connection
|
||||
},
|
||||
'application': {
|
||||
# Custom notification content
|
||||
'api_access_token_enabled': True,
|
||||
'password': False,
|
||||
'base_url' : None,
|
||||
'extract_title_as_title': False,
|
||||
'empty_pages_are_a_change': False,
|
||||
'extract_title_as_title': False,
|
||||
'fetch_backend': getenv("DEFAULT_FETCH_BACKEND", "html_requests"),
|
||||
'filter_failure_notification_threshold_attempts': _FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT,
|
||||
'global_ignore_text': [], # List of text to ignore when calculating the comparison checksum
|
||||
'global_subtractive_selectors': [],
|
||||
'ignore_whitespace': True,
|
||||
'render_anchor_tag_content': False,
|
||||
'notification_urls': [], # Apprise URL list
|
||||
# Custom notification content
|
||||
'notification_title': default_notification_title,
|
||||
'notification_body': default_notification_body,
|
||||
'notification_format': default_notification_format,
|
||||
'notification_title': default_notification_title,
|
||||
'notification_urls': [], # Apprise URL list
|
||||
'pager_size': 50,
|
||||
'password': False,
|
||||
'render_anchor_tag_content': False,
|
||||
'schema_version' : 0,
|
||||
'webdriver_delay': None # Extra delay in seconds before extracting text
|
||||
'shared_diff_access': False,
|
||||
'webdriver_delay': None , # Extra delay in seconds before extracting text
|
||||
'tags': {} #@todo use Tag.model initialisers
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -47,3 +51,15 @@ class model(dict):
|
||||
def __init__(self, *arg, **kw):
|
||||
super(model, self).__init__(*arg, **kw)
|
||||
self.update(self.base_config)
|
||||
|
||||
|
||||
def parse_headers_from_text_file(filepath):
|
||||
headers = {}
|
||||
with open(filepath, 'r') as f:
|
||||
for l in f.readlines():
|
||||
l = l.strip()
|
||||
if not l.startswith('#') and ':' in l:
|
||||
(k, v) = l.split(':')
|
||||
headers[k.strip()] = v.strip()
|
||||
|
||||
return headers
|
||||
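A hedged sketch of the text-file format parse_headers_from_text_file() expects: one "Name: value" per line, '#' lines ignored (the import path below is an assumption):

from changedetectionio.model.App import parse_headers_from_text_file  # module path assumed

with open('/tmp/headers.txt', 'w') as f:
    f.write("# extra headers for this watch\n"
            "User-Agent: changedetection.io\n"
            "Accept-Language: en-GB,en\n")

print(parse_headers_from_text_file('/tmp/headers.txt'))
# {'User-Agent': 'changedetection.io', 'Accept-Language': 'en-GB,en'}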
changedetectionio/model/Tag.py (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
from .Watch import base_config
|
||||
import uuid
|
||||
|
||||
class model(dict):
|
||||
|
||||
def __init__(self, *arg, **kw):
|
||||
|
||||
self.update(base_config)
|
||||
|
||||
self['uuid'] = str(uuid.uuid4())
|
||||
|
||||
if kw.get('default'):
|
||||
self.update(kw['default'])
|
||||
del kw['default']
|
||||
|
||||
|
||||
# Goes at the end so we update the default object with the initialiser
|
||||
super(model, self).__init__(*arg, **kw)
|
||||
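A minimal sketch of how the new Tag model is constructed: it re-uses the Watch base_config, generates its own uuid, and 'default' seeds the initial values:

from changedetectionio.model import Tag

tag = Tag.model(default={'title': 'Electronics', 'notification_muted': True})
print(tag['uuid'])        # freshly generated uuid4
print(tag.get('title'))   # Electronics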
|
||||
@@ -1,8 +1,14 @@
|
||||
from distutils.util import strtobool
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
|
||||
# Allowable protocols, protects against javascript: etc
|
||||
# file:// is further checked by ALLOW_FILE_URI
|
||||
SAFE_PROTOCOL_REGEX='^(http|https|ftp|file):'
|
||||
|
||||
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
|
||||
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
|
||||
@@ -11,57 +17,83 @@ from changedetectionio.notification import (
|
||||
default_notification_format_for_watch
|
||||
)
|
||||
|
||||
base_config = {
|
||||
'body': None,
|
||||
'browser_steps_last_error_step': None,
|
||||
'check_unique_lines': False, # On change-detected, compare against all history if its something new
|
||||
'check_count': 0,
|
||||
'date_created': None,
|
||||
'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
|
||||
'extract_text': [], # Extract text by regex after filters
|
||||
'extract_title_as_title': False,
|
||||
'fetch_backend': 'system', # plaintext, playwright etc
|
||||
'fetch_time': 0.0,
|
||||
'processor': 'text_json_diff', # could be restock_diff or others from .processors
|
||||
'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
|
||||
'filter_text_added': True,
|
||||
'filter_text_replaced': True,
|
||||
'filter_text_removed': True,
|
||||
'has_ldjson_price_data': None,
|
||||
'track_ldjson_price_data': None,
|
||||
'headers': {}, # Extra headers to send
|
||||
'ignore_text': [], # List of text to ignore when calculating the comparison checksum
|
||||
'in_stock_only' : True, # Only trigger change on going to instock from out-of-stock
|
||||
'include_filters': [],
|
||||
'last_checked': 0,
|
||||
'last_error': False,
|
||||
'last_viewed': 0, # history key value of the last viewed via the [diff] link
|
||||
'method': 'GET',
|
||||
# Custom notification content
|
||||
'notification_body': None,
|
||||
'notification_format': default_notification_format_for_watch,
|
||||
'notification_muted': False,
|
||||
'notification_title': None,
|
||||
'notification_screenshot': False, # Include the latest screenshot if available and supported by the apprise URL
|
||||
'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
|
||||
'paused': False,
|
||||
'previous_md5': False,
|
||||
'previous_md5_before_filters': False, # Used for skipping changedetection entirely
|
||||
'proxy': None, # Preferred proxy connection
|
||||
'subtractive_selectors': [],
|
||||
'tag': '', # Old system of text name for a tag, to be removed
|
||||
'tags': [], # list of UUIDs to App.Tags
|
||||
'text_should_not_be_present': [], # Text that should not be present
|
||||
# Re #110, so then if this is set to None, we know to use the default value instead
|
||||
# Requires setting to None on submit if it's the same as the default
|
||||
# Should be all None by default, so we use the system default in this case.
|
||||
'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
|
||||
'title': None,
|
||||
'trigger_text': [], # List of text or regex to wait for until a change is detected
|
||||
'url': '',
|
||||
'uuid': str(uuid.uuid4()),
|
||||
'webdriver_delay': None,
|
||||
'webdriver_js_execute_code': None, # Run before change-detection
|
||||
}
|
||||
|
||||
|
||||
def is_safe_url(test_url):
|
||||
# See https://github.com/dgtlmoon/changedetection.io/issues/1358
|
||||
|
||||
# Remove 'source:' prefix so we don't get 'source:javascript:' etc
|
||||
# 'source:' is a valid way to tell us to return the source
|
||||
|
||||
r = re.compile(re.escape('source:'), re.IGNORECASE)
|
||||
test_url = r.sub('', test_url)
|
||||
|
||||
pattern = re.compile(os.getenv('SAFE_PROTOCOL_REGEX', SAFE_PROTOCOL_REGEX), re.IGNORECASE)
|
||||
if not pattern.match(test_url.strip()):
|
||||
return False
|
||||
|
||||
return True
|
||||
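A few illustrative calls against is_safe_url() (a sketch; file:// access is additionally gated by ALLOW_FILE_URI in the fetch code):

from changedetectionio.model.Watch import is_safe_url

print(is_safe_url('https://example.com/pricing'))          # True
print(is_safe_url('source:https://example.com/pricing'))   # True - 'source:' prefix is stripped first
print(is_safe_url('javascript:alert(document.cookie)'))    # False
print(is_safe_url('file:///etc/passwd'))                   # True here, checked again by ALLOW_FILE_URI later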
|
||||
class model(dict):
|
||||
__newest_history_key = None
|
||||
__history_n=0
|
||||
__base_config = {
|
||||
#'history': {}, # Dict of timestamp and output stripped filename (removed)
|
||||
#'newest_history_key': 0, (removed, taken from history.txt index)
|
||||
'body': None,
|
||||
'check_unique_lines': False, # On change-detected, compare against all history if its something new
|
||||
'check_count': 0,
|
||||
'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
|
||||
'extract_text': [], # Extract text by regex after filters
|
||||
'extract_title_as_title': False,
|
||||
'fetch_backend': None,
|
||||
'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
|
||||
'headers': {}, # Extra headers to send
|
||||
'ignore_text': [], # List of text to ignore when calculating the comparison checksum
|
||||
'include_filters': [],
|
||||
'last_checked': 0,
|
||||
'last_error': False,
|
||||
'last_viewed': 0, # history key value of the last viewed via the [diff] link
|
||||
'method': 'GET',
|
||||
# Custom notification content
|
||||
'notification_body': None,
|
||||
'notification_format': default_notification_format_for_watch,
|
||||
'notification_muted': False,
|
||||
'notification_title': None,
|
||||
'notification_screenshot': False, # Include the latest screenshot if available and supported by the apprise URL
|
||||
'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
|
||||
'paused': False,
|
||||
'previous_md5': False,
|
||||
'proxy': None, # Preferred proxy connection
|
||||
'subtractive_selectors': [],
|
||||
'tag': None,
|
||||
'text_should_not_be_present': [], # Text that should not present
|
||||
# Re #110, so then if this is set to None, we know to use the default value instead
|
||||
# Requires setting to None on submit if it's the same as the default
|
||||
# Should be all None by default, so we use the system default in this case.
|
||||
'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
|
||||
'title': None,
|
||||
'trigger_text': [], # List of text or regex to wait for until a change is detected
|
||||
'url': None,
|
||||
'uuid': str(uuid.uuid4()),
|
||||
'webdriver_delay': None,
|
||||
'webdriver_js_execute_code': None, # Run before change-detection
|
||||
}
|
||||
__history_n = 0
|
||||
jitter_seconds = 0
|
||||
|
||||
def __init__(self, *arg, **kw):
|
||||
|
||||
self.update(self.__base_config)
|
||||
self.update(base_config)
|
||||
self.__datastore_path = kw['datastore_path']
|
||||
|
||||
self['uuid'] = str(uuid.uuid4())
|
||||
@@ -92,20 +124,53 @@ class model(dict):
|
||||
|
||||
@property
|
||||
def link(self):
|
||||
|
||||
url = self.get('url', '')
|
||||
if not is_safe_url(url):
|
||||
return 'DISABLED'
|
||||
|
||||
ready_url = url
|
||||
if '{%' in url or '{{' in url:
|
||||
from jinja2 import Environment
|
||||
# Jinja2 available in URLs along with https://pypi.org/project/jinja2-time/
|
||||
jinja2_env = Environment(extensions=['jinja2_time.TimeExtension'])
|
||||
return str(jinja2_env.from_string(url).render())
|
||||
return url
|
||||
try:
|
||||
ready_url = str(jinja2_env.from_string(url).render())
|
||||
except Exception as e:
|
||||
from flask import (
|
||||
flash, Markup, url_for
|
||||
)
|
||||
message = Markup('<a href="{}#general">The URL {} is invalid and cannot be used, click to edit</a>'.format(
|
||||
url_for('edit_page', uuid=self.get('uuid')), self.get('url', '')))
|
||||
flash(message, 'error')
|
||||
return ''
|
||||
|
||||
return ready_url
|
||||
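A sketch of the kind of URL template the jinja2_time extension enables in the link property above (the URL itself is hypothetical):

from jinja2 import Environment

jinja2_env = Environment(extensions=['jinja2_time.TimeExtension'])
url = "https://example.com/reports/{% now 'utc', '%Y-%m-%d' %}.html"
print(jinja2_env.from_string(url).render())   # e.g. https://example.com/reports/2023-09-01.html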
|
||||
@property
|
||||
def get_fetch_backend(self):
|
||||
"""
|
||||
Like just using the `fetch_backend` key but there could be some logic
|
||||
:return:
|
||||
"""
|
||||
# Maybe also if is_image etc?
|
||||
# This is because chrome/playwright wont render the PDF in the browser and we will just fetch it and use pdf2html to see the text.
|
||||
if self.is_pdf:
|
||||
return 'html_requests'
|
||||
|
||||
return self.get('fetch_backend')
|
||||
|
||||
@property
|
||||
def is_pdf(self):
|
||||
# content_type field is set in the future
|
||||
# https://github.com/dgtlmoon/changedetection.io/issues/1392
|
||||
# Not sure the best logic here
|
||||
return self.get('url', '').lower().endswith('.pdf') or 'pdf' in self.get('content_type', '').lower()
|
||||
|
||||
@property
|
||||
def label(self):
|
||||
# Used for sorting
|
||||
if self['title']:
|
||||
return self['title']
|
||||
return self['url']
|
||||
return self.get('title') if self.get('title') else self.get('url')
|
||||
|
||||
@property
|
||||
def last_changed(self):
|
||||
@@ -182,9 +247,32 @@ class model(dict):
|
||||
bump = self.history
|
||||
return self.__newest_history_key
|
||||
|
||||
def get_history_snapshot(self, timestamp):
|
||||
import brotli
|
||||
filepath = self.history[timestamp]
|
||||
|
||||
# See if a brotli versions exists and switch to that
|
||||
if not filepath.endswith('.br') and os.path.isfile(f"{filepath}.br"):
|
||||
filepath = f"{filepath}.br"
|
||||
|
||||
# OR in the backup case that the .br does not exist, but the plain one does
|
||||
if filepath.endswith('.br') and not os.path.isfile(filepath):
|
||||
if os.path.isfile(filepath.replace('.br', '')):
|
||||
filepath = filepath.replace('.br', '')
|
||||
|
||||
if filepath.endswith('.br'):
|
||||
# Brotli doesn't have a file header to detect it, so we rely on the filename
|
||||
# https://www.rfc-editor.org/rfc/rfc7932
|
||||
with open(filepath, 'rb') as f:
|
||||
return(brotli.decompress(f.read()).decode('utf-8'))
|
||||
|
||||
with open(filepath, 'r', encoding='utf-8', errors='ignore') as f:
|
||||
return f.read()
|
||||
|
||||
# Save some text file to the appropriate path and bump the history
|
||||
# result_obj from fetch_site_status.run()
|
||||
def save_history_text(self, contents, timestamp):
|
||||
def save_history_text(self, contents, timestamp, snapshot_id):
|
||||
import brotli
|
||||
|
||||
self.ensure_data_dir_exists()
|
||||
|
||||
@@ -193,13 +281,21 @@ class model(dict):
|
||||
if self.__newest_history_key and int(timestamp) == int(self.__newest_history_key):
|
||||
time.sleep(timestamp - self.__newest_history_key)
|
||||
|
||||
snapshot_fname = "{}.txt".format(str(uuid.uuid4()))
|
||||
threshold = int(os.getenv('SNAPSHOT_BROTLI_COMPRESSION_THRESHOLD', 1024))
|
||||
skip_brotli = strtobool(os.getenv('DISABLE_BROTLI_TEXT_SNAPSHOT', 'False'))
|
||||
|
||||
# in /diff/ and /preview/ we are going to assume for now that it's UTF-8 when reading
|
||||
# most sites are utf-8 and some are even broken utf-8
|
||||
with open(os.path.join(self.watch_data_dir, snapshot_fname), 'wb') as f:
|
||||
f.write(contents)
|
||||
f.close()
|
||||
if not skip_brotli and len(contents) > threshold:
|
||||
snapshot_fname = f"{snapshot_id}.txt.br"
|
||||
dest = os.path.join(self.watch_data_dir, snapshot_fname)
|
||||
if not os.path.exists(dest):
|
||||
with open(dest, 'wb') as f:
|
||||
f.write(brotli.compress(contents, mode=brotli.MODE_TEXT))
|
||||
else:
|
||||
snapshot_fname = f"{snapshot_id}.txt"
|
||||
dest = os.path.join(self.watch_data_dir, snapshot_fname)
|
||||
if not os.path.exists(dest):
|
||||
with open(dest, 'wb') as f:
|
||||
f.write(contents)
|
||||
|
||||
# Append to index
|
||||
# @todo check last char was \n
|
||||
@@ -236,7 +332,8 @@ class model(dict):
|
||||
# Compare each lines (set) against each history text file (set) looking for something new..
|
||||
existing_history = set({})
|
||||
for k, v in self.history.items():
|
||||
alist = set([line.decode('utf-8').strip().lower() for line in open(v, 'rb')])
|
||||
content = self.get_history_snapshot(k)
|
||||
alist = set([line.strip().lower() for line in content.splitlines()])
|
||||
existing_history = existing_history.union(alist)
|
||||
|
||||
# Check that everything in local_lines(new stuff) already exists in existing_history - it should
|
||||
@@ -251,17 +348,6 @@ class model(dict):
|
||||
# False is not an option for AppRise, must be type None
|
||||
return None
|
||||
|
||||
def get_screenshot_as_jpeg(self):
|
||||
|
||||
# Created by save_screenshot()
|
||||
fname = os.path.join(self.watch_data_dir, "last-screenshot.jpg")
|
||||
if os.path.isfile(fname):
|
||||
return fname
|
||||
|
||||
# False is not an option for AppRise, must be type None
|
||||
return None
|
||||
|
||||
|
||||
def __get_file_ctime(self, filename):
|
||||
fname = os.path.join(self.watch_data_dir, filename)
|
||||
if os.path.isfile(fname):
|
||||
@@ -307,3 +393,112 @@ class model(dict):
|
||||
if os.path.isfile(fname):
|
||||
return fname
|
||||
return False
|
||||
|
||||
|
||||
def pause(self):
|
||||
self['paused'] = True
|
||||
|
||||
def unpause(self):
|
||||
self['paused'] = False
|
||||
|
||||
def toggle_pause(self):
|
||||
self['paused'] ^= True
|
||||
|
||||
def mute(self):
|
||||
self['notification_muted'] = True
|
||||
|
||||
def unmute(self):
|
||||
self['notification_muted'] = False
|
||||
|
||||
def toggle_mute(self):
|
||||
self['notification_muted'] ^= True
|
||||
|
||||
def extract_regex_from_all_history(self, regex):
|
||||
import csv
|
||||
import re
|
||||
import datetime
|
||||
csv_output_filename = False
|
||||
csv_writer = False
|
||||
f = None
|
||||
|
||||
# self.history will be keyed with the full path
|
||||
for k, fname in self.history.items():
|
||||
if os.path.isfile(fname):
|
||||
if True:
|
||||
contents = self.get_history_snapshot(k)
|
||||
res = re.findall(regex, contents, re.MULTILINE)
|
||||
if res:
|
||||
if not csv_writer:
|
||||
# A file on the disk can be transferred much faster via flask than a string reply
|
||||
csv_output_filename = 'report.csv'
|
||||
f = open(os.path.join(self.watch_data_dir, csv_output_filename), 'w')
|
||||
# @todo some headers in the future
|
||||
#fieldnames = ['Epoch seconds', 'Date']
|
||||
csv_writer = csv.writer(f,
|
||||
delimiter=',',
|
||||
quotechar='"',
|
||||
quoting=csv.QUOTE_MINIMAL,
|
||||
#fieldnames=fieldnames
|
||||
)
|
||||
csv_writer.writerow(['Epoch seconds', 'Date'])
|
||||
# csv_writer.writeheader()
|
||||
|
||||
date_str = datetime.datetime.fromtimestamp(int(k)).strftime('%Y-%m-%d %H:%M:%S')
|
||||
for r in res:
|
||||
row = [k, date_str]
|
||||
if isinstance(r, str):
|
||||
row.append(r)
|
||||
else:
|
||||
row+=r
|
||||
csv_writer.writerow(row)
|
||||
|
||||
if f:
|
||||
f.close()
|
||||
|
||||
return csv_output_filename
|
||||
|
||||
|
||||
def has_special_diff_filter_options_set(self):
|
||||
|
||||
# All False - nothing would be done, so act like it's not processable
|
||||
if not self.get('filter_text_added', True) and not self.get('filter_text_replaced', True) and not self.get('filter_text_removed', True):
|
||||
return False
|
||||
|
||||
# Or one is set
|
||||
if not self.get('filter_text_added', True) or not self.get('filter_text_replaced', True) or not self.get('filter_text_removed', True):
|
||||
return True
|
||||
|
||||
# None is set
|
||||
return False
|
||||
|
||||
|
||||
def get_last_fetched_before_filters(self):
|
||||
import brotli
|
||||
filepath = os.path.join(self.watch_data_dir, 'last-fetched.br')
|
||||
|
||||
if not os.path.isfile(filepath):
|
||||
# If a previous attempt doesnt yet exist, just snarf the previous snapshot instead
|
||||
dates = list(self.history.keys())
|
||||
if len(dates):
|
||||
return self.get_history_snapshot(dates[-1])
|
||||
else:
|
||||
return ''
|
||||
|
||||
with open(filepath, 'rb') as f:
|
||||
return(brotli.decompress(f.read()).decode('utf-8'))
|
||||
|
||||
def save_last_fetched_before_filters(self, contents):
|
||||
import brotli
|
||||
filepath = os.path.join(self.watch_data_dir, 'last-fetched.br')
|
||||
with open(filepath, 'wb') as f:
|
||||
f.write(brotli.compress(contents, mode=brotli.MODE_TEXT))
|
||||
|
||||
@property
|
||||
def get_browsersteps_available_screenshots(self):
|
||||
"For knowing which screenshots are available to show the user in BrowserSteps UI"
|
||||
available = []
|
||||
for f in Path(self.watch_data_dir).glob('step_before-*.jpeg'):
|
||||
step_n=re.search(r'step_before-(\d+)', f.name)
|
||||
if step_n:
|
||||
available.append(step_n.group(1))
|
||||
return available
|
||||
|
||||
@@ -1,23 +1,29 @@
|
||||
import apprise
|
||||
from jinja2 import Environment, BaseLoader
|
||||
from apprise import NotifyFormat
|
||||
import json
|
||||
|
||||
valid_tokens = {
|
||||
'base_url': '',
|
||||
'watch_url': '',
|
||||
'watch_uuid': '',
|
||||
'watch_title': '',
|
||||
'watch_tag': '',
|
||||
'current_snapshot': '',
|
||||
'diff': '',
|
||||
'diff_added': '',
|
||||
'diff_full': '',
|
||||
'diff_patch': '',
|
||||
'diff_removed': '',
|
||||
'diff_url': '',
|
||||
'preview_url': '',
|
||||
'current_snapshot': ''
|
||||
'triggered_text': '',
|
||||
'watch_tag': '',
|
||||
'watch_title': '',
|
||||
'watch_url': '',
|
||||
'watch_uuid': '',
|
||||
}
|
||||
|
||||
default_notification_format_for_watch = 'System default'
|
||||
default_notification_format = 'Text'
|
||||
default_notification_body = '{watch_url} had a change.\n---\n{diff}\n---\n'
|
||||
default_notification_title = 'ChangeDetection.io Notification - {watch_url}'
|
||||
default_notification_body = '{{watch_url}} had a change.\n---\n{{diff}}\n---\n'
|
||||
default_notification_title = 'ChangeDetection.io Notification - {{watch_url}}'
|
||||
|
||||
valid_notification_formats = {
|
||||
'Text': NotifyFormat.TEXT,
|
||||
@@ -27,24 +33,72 @@ valid_notification_formats = {
|
||||
default_notification_format_for_watch: default_notification_format_for_watch
|
||||
}
|
||||
|
||||
def process_notification(n_object, datastore):
|
||||
# include the decorator
|
||||
from apprise.decorators import notify
|
||||
|
||||
# Get the notification body from datastore
|
||||
n_body = n_object.get('notification_body', default_notification_body)
|
||||
n_title = n_object.get('notification_title', default_notification_title)
|
||||
n_format = valid_notification_formats.get(
|
||||
n_object['notification_format'],
|
||||
valid_notification_formats[default_notification_format],
|
||||
)
|
||||
@notify(on="delete")
|
||||
@notify(on="deletes")
|
||||
@notify(on="get")
|
||||
@notify(on="gets")
|
||||
@notify(on="post")
|
||||
@notify(on="posts")
|
||||
@notify(on="put")
|
||||
@notify(on="puts")
|
||||
def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs):
|
||||
import requests
|
||||
url = kwargs['meta'].get('url')
|
||||
|
||||
if url.startswith('post'):
|
||||
r = requests.post
|
||||
elif url.startswith('get'):
|
||||
r = requests.get
|
||||
elif url.startswith('put'):
|
||||
r = requests.put
|
||||
elif url.startswith('delete'):
|
||||
r = requests.delete
|
||||
|
||||
url = url.replace('post://', 'http://')
|
||||
url = url.replace('posts://', 'https://')
|
||||
url = url.replace('put://', 'http://')
|
||||
url = url.replace('puts://', 'https://')
|
||||
url = url.replace('get://', 'http://')
|
||||
url = url.replace('gets://', 'https://')
|
||||
url = url.replace('put://', 'http://')
|
||||
url = url.replace('puts://', 'https://')
|
||||
url = url.replace('delete://', 'http://')
|
||||
url = url.replace('deletes://', 'https://')
|
||||
|
||||
# Try to auto-guess if it's JSON
|
||||
headers = {}
|
||||
try:
|
||||
json.loads(body)
|
||||
headers = {'Content-Type': 'application/json; charset=utf-8'}
|
||||
except ValueError as e:
|
||||
pass
|
||||
|
||||
|
||||
r(url, headers=headers, data=body)
|
||||
|
||||
|
||||
def process_notification(n_object, datastore):
|
||||
|
||||
# Insert variables into the notification content
|
||||
notification_parameters = create_notification_parameters(n_object, datastore)
|
||||
|
||||
for n_k in notification_parameters:
|
||||
token = '{' + n_k + '}'
|
||||
val = notification_parameters[n_k]
|
||||
n_title = n_title.replace(token, val)
|
||||
n_body = n_body.replace(token, val)
|
||||
# Get the notification body from datastore
|
||||
jinja2_env = Environment(loader=BaseLoader)
|
||||
n_body = jinja2_env.from_string(n_object.get('notification_body', default_notification_body)).render(**notification_parameters)
|
||||
n_title = jinja2_env.from_string(n_object.get('notification_title', default_notification_title)).render(**notification_parameters)
|
||||
n_format = valid_notification_formats.get(
|
||||
n_object.get('notification_format', default_notification_format),
|
||||
valid_notification_formats[default_notification_format],
|
||||
)
|
||||
|
||||
# If we arrived with 'System default' then look it up
|
||||
if n_format == default_notification_format_for_watch and datastore.data['settings']['application'].get('notification_format') != default_notification_format_for_watch:
|
||||
# Initially text or whatever
|
||||
n_format = datastore.data['settings']['application'].get('notification_format', valid_notification_formats[default_notification_format])
|
||||
|
||||
|
||||
# https://github.com/caronc/apprise/wiki/Development_LogCapture
|
||||
# Anything higher than or equal to WARNING (which covers things like Connection errors)
|
||||
@@ -53,6 +107,7 @@ def process_notification(n_object, datastore):
|
||||
sent_objs=[]
|
||||
from .apprise_asset import asset
|
||||
for url in n_object['notification_urls']:
|
||||
url = jinja2_env.from_string(url).render(**notification_parameters)
|
||||
apobj = apprise.Apprise(debug=True, asset=asset)
|
||||
url = url.strip()
|
||||
if len(url):
|
||||
@@ -66,14 +121,19 @@ def process_notification(n_object, datastore):
|
||||
|
||||
# So if no avatar_url is specified, add one so it can be correctly calculated into the total payload
|
||||
k = '?' if not '?' in url else '&'
|
||||
if not 'avatar_url' in url and not url.startswith('mail'):
|
||||
if not 'avatar_url' in url \
|
||||
and not url.startswith('mail') \
|
||||
and not url.startswith('post') \
|
||||
and not url.startswith('get') \
|
||||
and not url.startswith('delete') \
|
||||
and not url.startswith('put'):
|
||||
url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png'
|
||||
|
||||
if url.startswith('tgram://'):
|
||||
# Telegram only supports a limit subset of HTML, remove the '<br/>' we place in.
|
||||
# Telegram only supports a limited subset of HTML, remove the '<br>' we place in.
|
||||
# re https://github.com/dgtlmoon/changedetection.io/issues/555
|
||||
# @todo re-use an existing library we have already imported to strip all non-allowed tags
|
||||
n_body = n_body.replace('<br/>', '\n')
|
||||
n_body = n_body.replace('<br>', '\n')
|
||||
n_body = n_body.replace('</br>', '\n')
|
||||
# real limit is 4096, but minus some for extra metadata
|
||||
payload_max_size = 3600
|
||||
@@ -92,9 +152,12 @@ def process_notification(n_object, datastore):
|
||||
# Apprise will default to HTML, so we need to override it
|
||||
# So that whats' generated in n_body is in line with what is going to be sent.
|
||||
# https://github.com/caronc/apprise/issues/633#issuecomment-1191449321
|
||||
if not 'format=' in url and (n_format == 'text' or n_format == 'markdown'):
|
||||
if not 'format=' in url and (n_format == 'Text' or n_format == 'Markdown'):
|
||||
prefix = '?' if not '?' in url else '&'
|
||||
# Apprise format is lowercase text https://github.com/caronc/apprise/issues/633
|
||||
n_format = n_format.lower()
|
||||
url = "{}{}format={}".format(url, prefix, n_format)
|
||||
# If n_format == HTML, then apprise email should default to text/html and we should be sending HTML only
|
||||
|
||||
apobj.add(url)
|
||||
|
||||
@@ -115,7 +178,7 @@ def process_notification(n_object, datastore):
|
||||
log_value = logs.getvalue()
|
||||
if log_value and ('WARNING' in log_value or 'ERROR' in log_value):
|
||||
raise Exception(log_value)
|
||||
|
||||
|
||||
sent_objs.append({'title': n_title,
|
||||
'body': n_body,
|
||||
'url' : url,
|
||||
@@ -133,22 +196,23 @@ def create_notification_parameters(n_object, datastore):
|
||||
uuid = n_object['uuid'] if 'uuid' in n_object else ''
|
||||
|
||||
if uuid != '':
|
||||
watch_title = datastore.data['watching'][uuid]['title']
|
||||
watch_tag = datastore.data['watching'][uuid]['tag']
|
||||
watch_title = datastore.data['watching'][uuid].get('title', '')
|
||||
tag_list = []
|
||||
tags = datastore.get_all_tags_for_watch(uuid)
|
||||
if tags:
|
||||
for tag_uuid, tag in tags.items():
|
||||
tag_list.append(tag.get('title'))
|
||||
watch_tag = ', '.join(tag_list)
|
||||
else:
|
||||
watch_title = 'Change Detection'
|
||||
watch_tag = ''
|
||||
|
||||
# Create URLs to customise the notification with
|
||||
base_url = datastore.data['settings']['application']['base_url']
|
||||
# active_base_url - set in store.py data property
|
||||
base_url = datastore.data['settings']['application'].get('active_base_url')
|
||||
|
||||
watch_url = n_object['watch_url']
|
||||
|
||||
# Re #148 - Some people have just {base_url} in the body or title, but this may break some notification services
|
||||
# like 'Join', so it's always best to at least set something obvious so that they are not broken.
|
||||
if base_url == '':
|
||||
base_url = "<base-url-env-var-not-set>"
|
||||
|
||||
diff_url = "{}/diff/{}".format(base_url, uuid)
|
||||
preview_url = "{}/preview/{}".format(base_url, uuid)
|
||||
|
||||
@@ -158,16 +222,20 @@ def create_notification_parameters(n_object, datastore):
|
||||
# Valid_tokens also used as a field validator
|
||||
tokens.update(
|
||||
{
|
||||
'base_url': base_url if base_url is not None else '',
|
||||
'base_url': base_url,
|
||||
'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else '',
|
||||
'diff': n_object.get('diff', ''), # Null default in the case we use a test
|
||||
'diff_added': n_object.get('diff_added', ''), # Null default in the case we use a test
|
||||
'diff_full': n_object.get('diff_full', ''), # Null default in the case we use a test
|
||||
'diff_patch': n_object.get('diff_patch', ''), # Null default in the case we use a test
|
||||
'diff_removed': n_object.get('diff_removed', ''), # Null default in the case we use a test
|
||||
'diff_url': diff_url,
|
||||
'preview_url': preview_url,
|
||||
'triggered_text': n_object.get('triggered_text', ''),
|
||||
'watch_tag': watch_tag if watch_tag is not None else '',
|
||||
'watch_title': watch_title if watch_title is not None else '',
|
||||
'watch_url': watch_url,
|
||||
'watch_uuid': uuid,
|
||||
'watch_title': watch_title if watch_title is not None else '',
|
||||
'watch_tag': watch_tag if watch_tag is not None else '',
|
||||
'diff_url': diff_url,
|
||||
'diff': n_object.get('diff', ''), # Null default in the case we use a test
|
||||
'diff_full': n_object.get('diff_full', ''), # Null default in the case we use a test
|
||||
'preview_url': preview_url,
|
||||
'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else ''
|
||||
})
|
||||
|
||||
return tokens
|
||||
|
||||
changedetectionio/processors/README.md (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
# Change detection post-processors
|
||||
|
||||
The concept here is to be able to switch between different domain specific problems to solve.
|
||||
|
||||
- `text_json_diff` The traditional text and JSON comparison handler
|
||||
- `restock_diff` Only cares about detecting if a product looks like it has some text that suggests that it's out of stock, otherwise assumes that it's in stock.
|
||||
|
||||
Some suggestions for the future
|
||||
|
||||
- `graphical`
|
||||
- `restock_and_price` - extract price AND stock text
|
||||
changedetectionio/processors/__init__.py (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
from abc import abstractmethod
|
||||
import hashlib
|
||||
|
||||
|
||||
class difference_detection_processor():
|
||||
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@abstractmethod
|
||||
def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
|
||||
update_obj = {'last_notification_error': False, 'last_error': False}
|
||||
some_data = 'xxxxx'
|
||||
update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest()
|
||||
changed_detected = False
|
||||
return changed_detected, update_obj, ''.encode('utf-8')
|
||||
|
||||
|
||||
def available_processors():
|
||||
from . import restock_diff, text_json_diff
|
||||
x=[('text_json_diff', text_json_diff.name), ('restock_diff', restock_diff.name)]
|
||||
# @todo Make this smarter with introspection of sorts.
|
||||
return x
|
||||
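A hedged sketch of what a pluggable processor looks like, following the abstract run() signature above (the detection logic here is hypothetical, purely to show the shape):

import hashlib
from changedetectionio.processors import difference_detection_processor

class perform_site_check(difference_detection_processor):
    """Example processor: flags a change whenever the watched URL text length changes."""

    def __init__(self, *args, datastore, **kwargs):
        super().__init__(*args, **kwargs)
        self.datastore = datastore

    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
        update_obj = {'last_notification_error': False, 'last_error': False}
        # Stand-in for fetched content - a real processor would run a content_fetcher here
        text = self.datastore.data['watching'][uuid].get('url', '')
        fetched_md5 = hashlib.md5(str(len(text)).encode('utf-8')).hexdigest()
        changed_detected = self.datastore.data['watching'][uuid].get('previous_md5') != fetched_md5
        update_obj['previous_md5'] = fetched_md5
        return changed_detected, update_obj, text.encode('utf-8')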
changedetectionio/processors/restock_diff.py (new file, 131 lines)
@@ -0,0 +1,131 @@
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import urllib3
|
||||
from . import difference_detection_processor
|
||||
from changedetectionio import content_fetcher
|
||||
from copy import deepcopy
|
||||
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
name = 'Re-stock detection for single product pages'
|
||||
description = 'Detects if the product goes back to in-stock'
|
||||
|
||||
class UnableToExtractRestockData(Exception):
|
||||
def __init__(self, status_code):
|
||||
# Set this so we can use it in other parts of the app
|
||||
self.status_code = status_code
|
||||
return
|
||||
|
||||
class perform_site_check(difference_detection_processor):
|
||||
screenshot = None
|
||||
xpath_data = None
|
||||
|
||||
def __init__(self, *args, datastore, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.datastore = datastore
|
||||
|
||||
def run(self, uuid, skip_when_checksum_same=True):
|
||||
|
||||
# DeepCopy so we can be sure we don't accidentally change anything by reference
|
||||
watch = deepcopy(self.datastore.data['watching'].get(uuid))
|
||||
|
||||
if not watch:
|
||||
raise Exception("Watch no longer exists.")
|
||||
|
||||
# Protect against file:// access
|
||||
if re.search(r'^file', watch.get('url', ''), re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False):
|
||||
raise Exception(
|
||||
"file:// type access is denied for security reasons."
|
||||
)
|
||||
|
||||
# Unset any existing notification error
|
||||
update_obj = {'last_notification_error': False, 'last_error': False}
|
||||
|
||||
request_headers = watch.get('headers', [])
|
||||
request_headers.update(self.datastore.get_all_base_headers())
|
||||
request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=uuid))
|
||||
|
||||
# https://github.com/psf/requests/issues/4525
|
||||
# Requests doesn't yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
|
||||
# do this by accident.
|
||||
if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
|
||||
request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')
|
||||
|
||||
timeout = self.datastore.data['settings']['requests'].get('timeout')
|
||||
|
||||
url = watch.link
|
||||
|
||||
request_body = self.datastore.data['watching'][uuid].get('body')
|
||||
request_method = self.datastore.data['watching'][uuid].get('method')
|
||||
ignore_status_codes = self.datastore.data['watching'][uuid].get('ignore_status_codes', False)
|
||||
|
||||
# Pluggable content fetcher
|
||||
prefer_backend = watch.get_fetch_backend
|
||||
if not prefer_backend or prefer_backend == 'system':
|
||||
prefer_backend = self.datastore.data['settings']['application']['fetch_backend']
|
||||
|
||||
if hasattr(content_fetcher, prefer_backend):
|
||||
klass = getattr(content_fetcher, prefer_backend)
|
||||
else:
|
||||
# If the klass doesnt exist, just use a default
|
||||
klass = getattr(content_fetcher, "html_requests")
|
||||
|
||||
proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
|
||||
proxy_url = None
|
||||
if proxy_id:
|
||||
proxy_url = self.datastore.proxy_list.get(proxy_id).get('url')
|
||||
print("UUID {} Using proxy {}".format(uuid, proxy_url))
|
||||
|
||||
fetcher = klass(proxy_override=proxy_url)
|
||||
|
||||
# Configurable per-watch or global extra delay before extracting text (for webDriver types)
|
||||
system_webdriver_delay = self.datastore.data['settings']['application'].get('webdriver_delay', None)
|
||||
if watch['webdriver_delay'] is not None:
|
||||
fetcher.render_extract_delay = watch.get('webdriver_delay')
|
||||
elif system_webdriver_delay is not None:
|
||||
fetcher.render_extract_delay = system_webdriver_delay
|
||||
|
||||
# Could be removed if requests/plaintext could also return some info?
|
||||
if prefer_backend != 'html_webdriver':
|
||||
raise Exception("Re-stock detection requires Chrome or compatible webdriver/playwright fetcher to work")
|
||||
|
||||
if watch.get('webdriver_js_execute_code') is not None and watch.get('webdriver_js_execute_code').strip():
|
||||
fetcher.webdriver_js_execute_code = watch.get('webdriver_js_execute_code')
|
||||
|
||||
fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'))
|
||||
fetcher.quit()
|
||||
|
||||
self.screenshot = fetcher.screenshot
|
||||
self.xpath_data = fetcher.xpath_data
|
||||
|
||||
# Track the content type
|
||||
update_obj['content_type'] = fetcher.headers.get('Content-Type', '')
|
||||
update_obj["last_check_status"] = fetcher.get_last_status_code()
|
||||
|
||||
# Main detection method
|
||||
fetched_md5 = None
|
||||
if fetcher.instock_data:
|
||||
fetched_md5 = hashlib.md5(fetcher.instock_data.encode('utf-8')).hexdigest()
|
||||
# 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold.
|
||||
update_obj["in_stock"] = True if fetcher.instock_data == 'Possibly in stock' else False
|
||||
else:
|
||||
raise UnableToExtractRestockData(status_code=fetcher.status_code)
|
||||
|
||||
# The main thing that all this at the moment comes down to :)
|
||||
changed_detected = False
|
||||
|
||||
if watch.get('previous_md5') and watch.get('previous_md5') != fetched_md5:
|
||||
# Yes if we only care about it going to instock, AND we are in stock
|
||||
if watch.get('in_stock_only') and update_obj["in_stock"]:
|
||||
changed_detected = True
|
||||
|
||||
if not watch.get('in_stock_only'):
|
||||
# All cases
|
||||
changed_detected = True
|
||||
|
||||
# Always record the new checksum
|
||||
update_obj["previous_md5"] = fetched_md5
|
||||
|
||||
return changed_detected, update_obj, fetcher.instock_data.encode('utf-8')
|
||||
@@ -1,23 +1,37 @@
|
||||
# HTML to TEXT/JSON DIFFERENCE FETCHER
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import urllib3
|
||||
|
||||
from changedetectionio import content_fetcher, html_tools
|
||||
from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
|
||||
from copy import deepcopy
|
||||
from . import difference_detection_processor
|
||||
from ..html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text
|
||||
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
name = 'Webpage Text/HTML, JSON and PDF changes'
|
||||
description = 'Detects all text changes where possible'
|
||||
json_filter_prefixes = ['json:', 'jq:']
|
||||
|
||||
class FilterNotFoundInResponse(ValueError):
|
||||
def __init__(self, msg):
|
||||
ValueError.__init__(self, msg)
|
||||
|
||||
|
||||
class PDFToHTMLToolNotFound(ValueError):
|
||||
def __init__(self, msg):
|
||||
ValueError.__init__(self, msg)
|
||||
|
||||
|
||||
# Some common stuff here that can be moved to a base class
|
||||
# (set_proxy_from_list)
|
||||
class perform_site_check():
|
||||
class perform_site_check(difference_detection_processor):
|
||||
screenshot = None
|
||||
xpath_data = None
|
||||
|
||||
@@ -25,30 +39,15 @@ class perform_site_check():
|
||||
super().__init__(*args, **kwargs)
|
||||
self.datastore = datastore
|
||||
|
||||
# Doesn't look like python supports forward slash auto enclosure in re.findall
|
||||
# So convert it to inline flag "foobar(?i)" type configuration
|
||||
def forward_slash_enclosed_regex_to_options(self, regex):
|
||||
res = re.search(r'^/(.*?)/(\w+)$', regex, re.IGNORECASE)
|
||||
|
||||
if res:
|
||||
regex = res.group(1)
|
||||
regex += '(?{})'.format(res.group(2))
|
||||
else:
|
||||
regex += '(?{})'.format('i')
|
||||
|
||||
return regex
|
||||
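For reference, the helper above turns a '/pattern/flags' style expression into Python's inline-flag form. A standalone sketch of the same conversion (illustrative only; note that newer Python versions only accept global inline flags at the start of a pattern, which is presumably why this diff moves the logic to html_tools.perl_style_slash_enclosed_regex_to_options):

import re

def slash_enclosed_regex_to_inline_flags(regex):
    # '/out of stock/i' -> 'out of stock(?i)'; anything else defaults to case-insensitive
    res = re.search(r'^/(.*?)/(\w+)$', regex, re.IGNORECASE)
    if res:
        return '{}(?{})'.format(res.group(1), res.group(2))
    return '{}(?i)'.format(regex)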
|
||||
def run(self, uuid):
|
||||
from copy import deepcopy
|
||||
def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
|
||||
changed_detected = False
|
||||
screenshot = False # as bytes
|
||||
stripped_text_from_html = ""
|
||||
|
||||
# DeepCopy so we can be sure we don't accidentally change anything by reference
|
||||
watch = deepcopy(self.datastore.data['watching'].get(uuid))
|
||||
|
||||
if not watch:
|
||||
return
|
||||
raise Exception("Watch no longer exists.")
|
||||
|
||||
# Protect against file:// access
|
||||
if re.search(r'^file', watch.get('url', ''), re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False):
|
||||
@@ -59,11 +58,10 @@ class perform_site_check():
|
||||
# Unset any existing notification error
|
||||
update_obj = {'last_notification_error': False, 'last_error': False}
|
||||
|
||||
extra_headers = watch.get('headers', [])
|
||||
|
||||
# Tweak the base config with the per-watch ones
|
||||
request_headers = deepcopy(self.datastore.data['settings']['headers'])
|
||||
request_headers.update(extra_headers)
|
||||
request_headers = watch.get('headers', [])
|
||||
request_headers.update(self.datastore.get_all_base_headers())
|
||||
request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=uuid))
|
||||
|
||||
# https://github.com/psf/requests/issues/4525
|
||||
# Requests doesnt yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
|
||||
@@ -86,14 +84,21 @@ class perform_site_check():
|
||||
is_source = True
|
||||
|
||||
# Pluggable content fetcher
|
||||
prefer_backend = watch.get('fetch_backend')
|
||||
prefer_backend = watch.get_fetch_backend
|
||||
if not prefer_backend or prefer_backend == 'system':
|
||||
prefer_backend = self.datastore.data['settings']['application']['fetch_backend']
|
||||
|
||||
if hasattr(content_fetcher, prefer_backend):
|
||||
klass = getattr(content_fetcher, prefer_backend)
|
||||
else:
|
||||
# If the klass doesnt exist, just use a default
|
||||
klass = getattr(content_fetcher, "html_requests")
|
||||
|
||||
proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
|
||||
if preferred_proxy:
|
||||
proxy_id = preferred_proxy
|
||||
else:
|
||||
proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
|
||||
|
||||
proxy_url = None
|
||||
if proxy_id:
|
||||
proxy_url = self.datastore.proxy_list.get(proxy_id).get('url')
|
||||
@@ -116,12 +121,26 @@ class perform_site_check():
|
||||
if watch.get('webdriver_js_execute_code') is not None and watch.get('webdriver_js_execute_code').strip():
|
||||
fetcher.webdriver_js_execute_code = watch.get('webdriver_js_execute_code')
|
||||
|
||||
fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'))
|
||||
# requests for PDFs, images etc should be passed the is_binary flag
|
||||
is_binary = watch.is_pdf
|
||||
|
||||
fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'),
|
||||
is_binary=is_binary)
|
||||
fetcher.quit()
|
||||
|
||||
self.screenshot = fetcher.screenshot
|
||||
self.xpath_data = fetcher.xpath_data
|
||||
|
||||
# Track the content type
|
||||
update_obj['content_type'] = fetcher.get_all_headers().get('content-type', '').lower()
|
||||
|
||||
# Watches added automatically in the queue manager will skip if its the same checksum as the previous run
|
||||
# Saves a lot of CPU
|
||||
update_obj['previous_md5_before_filters'] = hashlib.md5(fetcher.content.encode('utf-8')).hexdigest()
|
||||
if skip_when_checksum_same:
|
||||
if update_obj['previous_md5_before_filters'] == watch.get('previous_md5_before_filters'):
|
||||
raise content_fetcher.checksumFromPreviousCheckWasTheSame()
|
||||
|
||||
# Fetching complete, now filters
|
||||
# @todo move to class / maybe inside of fetcher abstract base?
|
||||
|
||||
@@ -132,31 +151,78 @@ class perform_site_check():
|
||||
# https://stackoverflow.com/questions/41817578/basic-method-chaining ?
|
||||
# return content().textfilter().jsonextract().checksumcompare() ?
|
||||
|
||||
is_json = 'application/json' in fetcher.headers.get('Content-Type', '')
|
||||
is_json = 'application/json' in fetcher.get_all_headers().get('content-type', '').lower()
|
||||
is_html = not is_json
|
||||
is_rss = False
|
||||
|
||||
ctype_header = fetcher.get_all_headers().get('content-type', '').lower()
|
||||
# Go into RSS preprocess for converting CDATA/comment to usable text
|
||||
if any(substring in ctype_header for substring in ['application/xml', 'application/rss', 'text/xml']):
|
||||
if '<rss' in fetcher.content[:100].lower():
|
||||
fetcher.content = cdata_in_document_to_text(html_content=fetcher.content)
|
||||
is_rss = True
|
||||
|
||||
# source: support, basically treat it as plaintext
|
||||
if is_source:
|
||||
is_html = False
|
||||
is_json = False
|
||||
|
||||
include_filters_rule = watch.get('include_filters', [])
|
||||
# include_filters_rule = watch['include_filters']
|
||||
subtractive_selectors = watch.get(
|
||||
"subtractive_selectors", []
|
||||
) + self.datastore.data["settings"]["application"].get(
|
||||
"global_subtractive_selectors", []
|
||||
)
|
||||
inline_pdf = fetcher.get_all_headers().get('content-disposition', '') and '%PDF-1' in fetcher.content[:10]
|
||||
if watch.is_pdf or 'application/pdf' in fetcher.get_all_headers().get('content-type', '').lower() or inline_pdf:
|
||||
from shutil import which
|
||||
tool = os.getenv("PDF_TO_HTML_TOOL", "pdftohtml")
|
||||
if not which(tool):
|
||||
raise PDFToHTMLToolNotFound("Command-line `{}` tool was not found in system PATH, was it installed?".format(tool))
|
||||
|
||||
has_filter_rule = include_filters_rule and len("".join(include_filters_rule).strip())
|
||||
has_subtractive_selectors = subtractive_selectors and len(subtractive_selectors[0].strip())
|
||||
import subprocess
|
||||
proc = subprocess.Popen(
|
||||
[tool, '-stdout', '-', '-s', 'out.pdf', '-i'],
|
||||
stdout=subprocess.PIPE,
|
||||
stdin=subprocess.PIPE)
|
||||
proc.stdin.write(fetcher.raw_content)
|
||||
proc.stdin.close()
|
||||
fetcher.content = proc.stdout.read().decode('utf-8')
|
||||
proc.wait(timeout=60)
|
||||
|
||||
# Add a little metadata so we know if the file changes (like if an image changes, but the text is the same
|
||||
# @todo may cause problems with non-UTF8?
|
||||
metadata = "<p>Added by changedetection.io: Document checksum - {} Filesize - {} bytes</p>".format(
|
||||
hashlib.md5(fetcher.raw_content).hexdigest().upper(),
|
||||
len(fetcher.content))
|
||||
|
||||
fetcher.content = fetcher.content.replace('</body>', metadata + '</body>')
|
||||
|
||||
# Better would be if Watch.model could access the global data also
|
||||
# and then use getattr https://docs.python.org/3/reference/datamodel.html#object.__getitem__
|
||||
# https://realpython.com/inherit-python-dict/ instead of doing it procedurely
|
||||
include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='include_filters')
|
||||
include_filters_rule = [*watch.get('include_filters', []), *include_filters_from_tags]
|
||||
|
||||
subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='subtractive_selectors'),
|
||||
*watch.get("subtractive_selectors", []),
|
||||
*self.datastore.data["settings"]["application"].get("global_subtractive_selectors", [])
|
||||
]
|
||||
|
||||
# Inject a virtual LD+JSON price tracker rule
|
||||
if watch.get('track_ldjson_price_data', '') == PRICE_DATA_TRACK_ACCEPT:
|
||||
include_filters_rule += html_tools.LD_JSON_PRODUCT_OFFER_SELECTORS
|
||||
|
||||
has_filter_rule = len(include_filters_rule) and len(include_filters_rule[0].strip())
|
||||
has_subtractive_selectors = len(subtractive_selectors) and len(subtractive_selectors[0].strip())
|
||||
|
||||
if is_json and not has_filter_rule:
|
||||
include_filters_rule.append("json:$")
|
||||
has_filter_rule = True
|
||||
|
||||
if is_json:
|
||||
# Sort the JSON so we dont get false alerts when the content is just re-ordered
|
||||
try:
|
||||
fetcher.content = json.dumps(json.loads(fetcher.content), sort_keys=True)
|
||||
except Exception as e:
|
||||
# Might have just been a snippet, or otherwise bad JSON, continue
|
||||
pass
|
||||
|
||||
if has_filter_rule:
|
||||
json_filter_prefixes = ['json:', 'jq:']
|
||||
for filter in include_filters_rule:
|
||||
if any(prefix in filter for prefix in json_filter_prefixes):
|
||||
stripped_text_from_html += html_tools.extract_json_as_string(content=fetcher.content, json_filter=filter)
|
||||
@@ -169,19 +235,24 @@ class perform_site_check():
|
||||
html_content = fetcher.content
|
||||
|
||||
# If not JSON, and if it's not text/plain..
|
||||
if 'text/plain' in fetcher.headers.get('Content-Type', '').lower():
|
||||
if 'text/plain' in fetcher.get_all_headers().get('content-type', '').lower():
|
||||
# Don't run get_text or xpath/css filters on plaintext
|
||||
stripped_text_from_html = html_content
|
||||
else:
|
||||
# Does it have some ld+json price data? used for easier monitoring
|
||||
update_obj['has_ldjson_price_data'] = html_tools.has_ldjson_product_info(fetcher.content)
|
||||
|
||||
# Then we assume HTML
|
||||
if has_filter_rule:
|
||||
html_content = ""
|
||||
|
||||
for filter_rule in include_filters_rule:
|
||||
# For HTML/XML we offer xpath as an option, just start a regular xPath "/.."
|
||||
if filter_rule[0] == '/' or filter_rule.startswith('xpath:'):
|
||||
html_content += html_tools.xpath_filter(xpath_filter=filter_rule.replace('xpath:', ''),
|
||||
html_content=fetcher.content,
|
||||
append_pretty_line_formatting=not is_source)
|
||||
append_pretty_line_formatting=not is_source,
|
||||
is_rss=is_rss)
|
||||
else:
|
||||
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
|
||||
html_content += html_tools.include_filters(include_filters=filter_rule,
|
||||
@@ -201,17 +272,50 @@ class perform_site_check():
|
||||
do_anchor = self.datastore.data["settings"]["application"].get("render_anchor_tag_content", False)
|
||||
stripped_text_from_html = \
|
||||
html_tools.html_to_text(
|
||||
html_content,
|
||||
render_anchor_tag_content=do_anchor
|
||||
html_content=html_content,
|
||||
render_anchor_tag_content=do_anchor,
|
||||
is_rss=is_rss # #1874 activate the <title workaround hack
|
||||
)
|
||||
|
||||
# Re #340 - return the content before the 'ignore text' was applied
|
||||
text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')
|
||||
|
||||
# @todo whitespace coming from missing rtrim()?
|
||||
# stripped_text_from_html could be based on their preferences, replace the processed text with only that which they want to know about.
|
||||
# Rewrites the processed text based on only the diff results they want to see
|
||||
if watch.has_special_diff_filter_options_set() and len(watch.history.keys()):
|
||||
# Now the content comes from the diff-parser and not the returned HTTP traffic, so could be some differences
|
||||
from .. import diff
|
||||
# needs to not include (added) etc or it may get used twice
|
||||
# Replace the processed text with the preferred result
|
||||
rendered_diff = diff.render_diff(previous_version_file_contents=watch.get_last_fetched_before_filters(),
|
||||
newest_version_file_contents=stripped_text_from_html,
|
||||
include_equal=False, # not the same lines
|
||||
include_added=watch.get('filter_text_added', True),
|
||||
include_removed=watch.get('filter_text_removed', True),
|
||||
include_replaced=watch.get('filter_text_replaced', True),
|
||||
line_feed_sep="\n",
|
||||
include_change_type_prefix=False)
|
||||
|
||||
watch.save_last_fetched_before_filters(text_content_before_ignored_filter)
|
||||
|
||||
if not rendered_diff and stripped_text_from_html:
|
||||
# We had some content, but no differences were found
|
||||
# Store our new file as the MD5 so it will trigger in the future
|
||||
c = hashlib.md5(text_content_before_ignored_filter.translate(None, b'\r\n\t ')).hexdigest()
|
||||
return False, {'previous_md5': c}, stripped_text_from_html.encode('utf-8')
|
||||
else:
|
||||
stripped_text_from_html = rendered_diff
|
||||
|
||||
# Treat pages with no renderable text content as a change? No by default
|
||||
empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)
|
||||
if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0:
|
||||
raise content_fetcher.ReplyWithContentButNoText(url=url, status_code=fetcher.get_last_status_code(), screenshot=screenshot)
|
||||
raise content_fetcher.ReplyWithContentButNoText(url=url,
|
||||
status_code=fetcher.get_last_status_code(),
|
||||
screenshot=screenshot,
|
||||
has_filters=has_filter_rule,
|
||||
html_content=html_content
|
||||
)
|
||||
|
||||
# We rely on the actual text in the html output.. many sites have random script vars etc,
|
||||
# in the future we'll implement other mechanisms.
|
||||
@@ -232,16 +336,25 @@ class perform_site_check():
|
||||
regex_matched_output = []
|
||||
for s_re in extract_text:
|
||||
# in case they specified something in '/.../x'
|
||||
regex = self.forward_slash_enclosed_regex_to_options(s_re)
|
||||
result = re.findall(regex.encode('utf-8'), stripped_text_from_html)
|
||||
if re.search(PERL_STYLE_REGEX, s_re, re.IGNORECASE):
|
||||
regex = html_tools.perl_style_slash_enclosed_regex_to_options(s_re)
|
||||
result = re.findall(regex.encode('utf-8'), stripped_text_from_html)
|
||||
|
||||
for l in result:
|
||||
if type(l) is tuple:
|
||||
# @todo - some formatter option default (between groups)
|
||||
regex_matched_output += list(l) + [b'\n']
|
||||
else:
|
||||
# @todo - some formatter option default (between each ungrouped result)
|
||||
regex_matched_output += [l] + [b'\n']
|
||||
for l in result:
|
||||
if type(l) is tuple:
|
||||
# @todo - some formatter option default (between groups)
|
||||
regex_matched_output += list(l) + [b'\n']
|
||||
else:
|
||||
# @todo - some formatter option default (between each ungrouped result)
|
||||
regex_matched_output += [l] + [b'\n']
|
||||
else:
|
||||
# Doesn't look like regex, just hunt for plaintext and return whatever matches
|
||||
# `stripped_text_from_html` will be bytes, so we must encode s_re also to bytes
|
||||
r = re.compile(re.escape(s_re.encode('utf-8')), re.IGNORECASE)
|
||||
res = r.findall(stripped_text_from_html)
|
||||
if res:
|
||||
for match in res:
|
||||
regex_matched_output += [match] + [b'\n']
|
||||
|
||||
# Now we will only show what the regex matched
|
||||
stripped_text_from_html = b''
|
||||
@@ -266,6 +379,7 @@ class perform_site_check():
|
||||
blocked = True
|
||||
# Filter and trigger works the same, so reuse it
|
||||
# It should return the line numbers that match
|
||||
# Unblock flow if the trigger was found (some text remained after stripped what didnt match)
|
||||
result = html_tools.strip_ignore_text(content=str(stripped_text_from_html),
|
||||
wordlist=trigger_text,
|
||||
mode="line numbers")
|
||||
10
changedetectionio/queuedWatchMetaData.py
Normal file
@@ -0,0 +1,10 @@
from dataclasses import dataclass, field
from typing import Any

# So that we can queue some metadata in `item`
# https://docs.python.org/3/library/queue.html#queue.PriorityQueue
#
@dataclass(order=True)
class PrioritizedItem:
priority: int
item: Any=field(compare=False)
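A brief usage sketch of how this dataclass pairs with queue.PriorityQueue (the uuid payloads below are made up for illustration):

import queue

from changedetectionio.queuedWatchMetaData import PrioritizedItem

q = queue.PriorityQueue()
q.put(PrioritizedItem(priority=5, item={'uuid': 'watch-uuid-bbb'}))
q.put(PrioritizedItem(priority=1, item={'uuid': 'watch-uuid-aaa'}))

# Lowest priority number comes out first; `item` is excluded from the ordering
# (compare=False), so arbitrary payloads never raise TypeError during comparison.
assert q.get().priority == 1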
190
changedetectionio/res/puppeteer_fetch.js
Normal file
@@ -0,0 +1,190 @@
|
||||
module.exports = async ({page, context}) => {
|
||||
|
||||
var {
|
||||
url,
|
||||
execute_js,
|
||||
user_agent,
|
||||
extra_wait_ms,
|
||||
req_headers,
|
||||
include_filters,
|
||||
xpath_element_js,
|
||||
screenshot_quality,
|
||||
proxy_username,
|
||||
proxy_password,
|
||||
disk_cache_dir,
|
||||
no_cache_list,
|
||||
block_url_list,
|
||||
} = context;
|
||||
|
||||
await page.setBypassCSP(true)
|
||||
await page.setExtraHTTPHeaders(req_headers);
|
||||
|
||||
if (user_agent) {
|
||||
await page.setUserAgent(user_agent);
|
||||
}
|
||||
// https://ourcodeworld.com/articles/read/1106/how-to-solve-puppeteer-timeouterror-navigation-timeout-of-30000-ms-exceeded
|
||||
|
||||
await page.setDefaultNavigationTimeout(0);
|
||||
|
||||
if (proxy_username) {
|
||||
// Setting Proxy-Authentication header is deprecated, and doing so can trigger header change errors from Puppeteer
|
||||
// https://github.com/puppeteer/puppeteer/issues/676 ?
|
||||
// https://help.brightdata.com/hc/en-us/articles/12632549957649-Proxy-Manager-How-to-Guides#h_01HAKWR4Q0AFS8RZTNYWRDFJC2
|
||||
// https://cri.dev/posts/2020-03-30-How-to-solve-Puppeteer-Chrome-Error-ERR_INVALID_ARGUMENT/
|
||||
await page.authenticate({
|
||||
username: proxy_username,
|
||||
password: proxy_password
|
||||
});
|
||||
}
|
||||
|
||||
await page.setViewport({
|
||||
width: 1024,
|
||||
height: 768,
|
||||
deviceScaleFactor: 1,
|
||||
});
|
||||
|
||||
await page.setRequestInterception(true);
|
||||
if (disk_cache_dir) {
|
||||
console.log(">>>>>>>>>>>>>>> LOCAL DISK CACHE ENABLED <<<<<<<<<<<<<<<<<<<<<");
|
||||
}
|
||||
const fs = require('fs');
|
||||
const crypto = require('crypto');
|
||||
|
||||
function file_is_expired(file_path) {
|
||||
if (!fs.existsSync(file_path)) {
|
||||
return true;
|
||||
}
|
||||
var stats = fs.statSync(file_path);
|
||||
const now_date = new Date();
|
||||
const expire_seconds = 300;
|
||||
if ((now_date / 1000) - (stats.mtime.getTime() / 1000) > expire_seconds) {
|
||||
console.log("CACHE EXPIRED: " + file_path);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
|
||||
}
|
||||
|
||||
page.on('request', async (request) => {
|
||||
// General blocking of requests that waste traffic
|
||||
if (block_url_list.some(substring => request.url().toLowerCase().includes(substring))) return request.abort();
|
||||
|
||||
if (disk_cache_dir) {
|
||||
const url = request.url();
|
||||
const key = crypto.createHash('md5').update(url).digest("hex");
|
||||
const dir_path = disk_cache_dir + key.slice(0, 1) + '/' + key.slice(1, 2) + '/' + key.slice(2, 3) + '/';
|
||||
|
||||
// https://stackoverflow.com/questions/4482686/check-synchronously-if-file-directory-exists-in-node-js
|
||||
|
||||
if (fs.existsSync(dir_path + key)) {
|
||||
console.log("* CACHE HIT , using - " + dir_path + key + " - " + url);
|
||||
const cached_data = fs.readFileSync(dir_path + key);
|
||||
// @todo headers can come from dir_path+key+".meta" json file
|
||||
request.respond({
|
||||
status: 200,
|
||||
//contentType: 'text/html', //@todo
|
||||
body: cached_data
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
request.continue();
|
||||
});
|
||||
|
||||
|
||||
if (disk_cache_dir) {
|
||||
page.on('response', async (response) => {
|
||||
const url = response.url();
|
||||
// Basic filtering for sane responses
|
||||
if (response.request().method() != 'GET' || response.request().resourceType() == 'xhr' || response.request().resourceType() == 'document' || response.status() != 200) {
|
||||
console.log("Skipping (not useful) - Status:" + response.status() + " Method:" + response.request().method() + " ResourceType:" + response.request().resourceType() + " " + url);
|
||||
return;
|
||||
}
|
||||
if (no_cache_list.some(substring => url.toLowerCase().includes(substring))) {
|
||||
console.log("Skipping (no_cache_list) - " + url);
|
||||
return;
|
||||
}
|
||||
if (url.toLowerCase().includes('data:')) {
|
||||
console.log("Skipping (embedded-data) - " + url);
|
||||
return;
|
||||
}
|
||||
response.buffer().then(buffer => {
|
||||
if (buffer.length > 100) {
|
||||
console.log("Cache - Saving " + response.request().method() + " - " + url + " - " + response.request().resourceType());
|
||||
|
||||
const key = crypto.createHash('md5').update(url).digest("hex");
|
||||
const dir_path = disk_cache_dir + key.slice(0, 1) + '/' + key.slice(1, 2) + '/' + key.slice(2, 3) + '/';
|
||||
|
||||
if (!fs.existsSync(dir_path)) {
|
||||
fs.mkdirSync(dir_path, {recursive: true})
|
||||
}
|
||||
|
||||
if (fs.existsSync(dir_path + key)) {
|
||||
if (file_is_expired(dir_path + key)) {
|
||||
fs.writeFileSync(dir_path + key, buffer);
|
||||
}
|
||||
} else {
|
||||
fs.writeFileSync(dir_path + key, buffer);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
const r = await page.goto(url, {
|
||||
waitUntil: 'load'
|
||||
});
|
||||
|
||||
await page.waitForTimeout(1000);
|
||||
await page.waitForTimeout(extra_wait_ms);
|
||||
|
||||
if (execute_js) {
|
||||
await page.evaluate(execute_js);
|
||||
await page.waitForTimeout(200);
|
||||
}
|
||||
|
||||
var xpath_data;
|
||||
var instock_data;
|
||||
try {
|
||||
// Not sure the best way here, in the future this should be a new package added to npm then run in browserless
|
||||
// (Once the old playwright is removed)
|
||||
xpath_data = await page.evaluate((include_filters) => {%xpath_scrape_code%}, include_filters);
|
||||
instock_data = await page.evaluate(() => {%instock_scrape_code%});
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
|
||||
// Protocol error (Page.captureScreenshot): Cannot take screenshot with 0 width can come from a proxy auth failure
|
||||
// Wrap it here (for now)
|
||||
|
||||
var b64s = false;
|
||||
try {
|
||||
b64s = await page.screenshot({encoding: "base64", fullPage: true, quality: screenshot_quality, type: 'jpeg'});
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
|
||||
// May fail on very large pages with 'WARNING: tile memory limits exceeded, some content may not draw'
|
||||
if (!b64s) {
|
||||
// @todo after text extract, we can place some overlay text with red background to say 'cropped'
|
||||
console.error('ERROR: content-fetcher page was maybe too large for a screenshot, reverting to viewport only screenshot');
|
||||
try {
|
||||
b64s = await page.screenshot({encoding: "base64", quality: screenshot_quality, type: 'jpeg'});
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
}
|
||||
|
||||
var html = await page.content();
|
||||
return {
|
||||
data: {
|
||||
'content': html,
|
||||
'headers': r.headers(),
|
||||
'instock_data': instock_data,
|
||||
'screenshot': b64s,
|
||||
'status_code': r.status(),
|
||||
'xpath_data': xpath_data
|
||||
},
|
||||
type: 'application/json',
|
||||
};
|
||||
};
|
||||
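The disk cache used above keys each response on the md5 of its URL, shards the files into three one-character directory levels, and treats entries older than 300 seconds as expired. A small Python illustration of the same layout (a sketch of the scheme, not code from the project):

import hashlib
import os
import time

def cache_path(disk_cache_dir, url):
    key = hashlib.md5(url.encode('utf-8')).hexdigest()
    # e.g. key 'a1b2...' is stored at <disk_cache_dir>/a/1/b/a1b2...
    return os.path.join(disk_cache_dir, key[0], key[1], key[2], key)

def file_is_expired(file_path, expire_seconds=300):
    if not os.path.exists(file_path):
        return True
    return (time.time() - os.path.getmtime(file_path)) > expire_seconds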
108
changedetectionio/res/stock-not-in-stock.js
Normal file
@@ -0,0 +1,108 @@
|
||||
function isItemInStock() {
|
||||
// @todo Pass these in so the same list can be used in non-JS fetchers
|
||||
const outOfStockTexts = [
|
||||
'0 in stock',
|
||||
'agotado',
|
||||
'artikel zurzeit vergriffen',
|
||||
'as soon as stock is available',
|
||||
'ausverkauft', // sold out
|
||||
'available for back order',
|
||||
'back-order or out of stock',
|
||||
'backordered',
|
||||
'benachrichtigt mich', // notify me
|
||||
'brak na stanie',
|
||||
'brak w magazynie',
|
||||
'coming soon',
|
||||
'currently have any tickets for this',
|
||||
'currently unavailable',
|
||||
'dostępne wkrótce',
|
||||
'en rupture de stock',
|
||||
'ist derzeit nicht auf lager',
|
||||
'item is no longer available',
|
||||
'message if back in stock',
|
||||
'nachricht bei',
|
||||
'nicht auf lager',
|
||||
'nicht lieferbar',
|
||||
'nicht zur verfügung',
|
||||
'no disponible temporalmente',
|
||||
'no longer in stock',
|
||||
'no tickets available',
|
||||
'not available',
|
||||
'not currently available',
|
||||
'not in stock',
|
||||
'notify me when available',
|
||||
'não estamos a aceitar encomendas',
|
||||
'out of stock',
|
||||
'out-of-stock',
|
||||
'produkt niedostępny',
|
||||
'sold out',
|
||||
'temporarily out of stock',
|
||||
'temporarily unavailable',
|
||||
'tickets unavailable',
|
||||
'unavailable tickets',
|
||||
'we do not currently have an estimate of when this product will be back in stock.',
|
||||
'zur zeit nicht an lager',
|
||||
'已售完',
|
||||
];
|
||||
|
||||
|
||||
const negateOutOfStockRegexs = [
|
||||
'[0-9] in stock'
|
||||
]
|
||||
var negateOutOfStockRegexs_r = [];
|
||||
for (let i = 0; i < negateOutOfStockRegexs.length; i++) {
|
||||
negateOutOfStockRegexs_r.push(new RegExp(negateOutOfStockRegexs[i], 'g'));
|
||||
}
|
||||
|
||||
|
||||
const elementsWithZeroChildren = Array.from(document.getElementsByTagName('*')).filter(element => element.children.length === 0);
|
||||
|
||||
// REGEXS THAT REALLY MEAN IT'S IN STOCK
|
||||
for (let i = elementsWithZeroChildren.length - 1; i >= 0; i--) {
|
||||
const element = elementsWithZeroChildren[i];
|
||||
if (element.offsetWidth > 0 || element.offsetHeight > 0 || element.getClientRects().length > 0) {
|
||||
var elementText="";
|
||||
if (element.tagName.toLowerCase() === "input") {
|
||||
elementText = element.value.toLowerCase();
|
||||
} else {
|
||||
elementText = element.textContent.toLowerCase();
|
||||
}
|
||||
|
||||
if (elementText.length) {
|
||||
// try which ones could mean its in stock
|
||||
for (let i = 0; i < negateOutOfStockRegexs.length; i++) {
|
||||
if (negateOutOfStockRegexs_r[i].test(elementText)) {
|
||||
return 'Possibly in stock';
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// OTHER STUFF THAT COULD BE THAT IT'S OUT OF STOCK
|
||||
for (let i = elementsWithZeroChildren.length - 1; i >= 0; i--) {
|
||||
const element = elementsWithZeroChildren[i];
|
||||
if (element.offsetWidth > 0 || element.offsetHeight > 0 || element.getClientRects().length > 0) {
|
||||
var elementText="";
|
||||
if (element.tagName.toLowerCase() === "input") {
|
||||
elementText = element.value.toLowerCase();
|
||||
} else {
|
||||
elementText = element.textContent.toLowerCase();
|
||||
}
|
||||
|
||||
if (elementText.length) {
|
||||
// and these mean its out of stock
|
||||
for (const outOfStockText of outOfStockTexts) {
|
||||
if (elementText.includes(outOfStockText)) {
|
||||
return elementText; // item is out of stock
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return 'Possibly in stock'; // possibly in stock, cant decide otherwise.
|
||||
}
|
||||
|
||||
// returns the element text that makes it think it's out of stock
|
||||
return isItemInStock();
|
||||
@@ -1,3 +1,22 @@
|
||||
// Copyright (C) 2021 Leigh Morresi (dgtlmoon@gmail.com)
|
||||
// All rights reserved.
|
||||
|
||||
// @file Scrape the page looking for elements of concern (%ELEMENTS%)
|
||||
// http://matatk.agrip.org.uk/tests/position-and-width/
|
||||
// https://stackoverflow.com/questions/26813480/when-is-element-getboundingclientrect-guaranteed-to-be-updated-accurate
|
||||
//
|
||||
// Some pages like https://www.londonstockexchange.com/stock/NCCL/ncondezi-energy-limited/analysis
|
||||
// will automatically force a scroll somewhere, so include the position offset
|
||||
// Let's hope the position doesn't change while we iterate the bboxes, but this is better than nothing
|
||||
var scroll_y = 0;
|
||||
try {
|
||||
scroll_y = +document.documentElement.scrollTop || document.body.scrollTop
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Include the getXpath script directly, easier than fetching
|
||||
function getxpath(e) {
|
||||
var n = e;
|
||||
@@ -25,15 +44,15 @@ const findUpTag = (el) => {
|
||||
if (el.name !== undefined && el.name.length) {
|
||||
var proposed = el.tagName + "[name=" + el.name + "]";
|
||||
var proposed_element = window.document.querySelectorAll(proposed);
|
||||
if(proposed_element.length) {
|
||||
if (proposed_element.length) {
|
||||
if (proposed_element.length === 1) {
|
||||
return proposed;
|
||||
} else {
|
||||
// Some sites change ID but name= stays the same, we can hit it if we know the index
|
||||
// Find all the elements that match and work out the input[n]
|
||||
var n=Array.from(proposed_element).indexOf(el);
|
||||
var n = Array.from(proposed_element).indexOf(el);
|
||||
// Return a Playwright selector for nthinput[name=zipcode]
|
||||
return proposed+" >> nth="+n;
|
||||
return proposed + " >> nth=" + n;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -71,8 +90,21 @@ var bbox;
|
||||
for (var i = 0; i < elements.length; i++) {
|
||||
bbox = elements[i].getBoundingClientRect();
|
||||
|
||||
// forget really small ones
|
||||
if (bbox['width'] < 15 && bbox['height'] < 15) {
|
||||
// Exclude items that are not interactable or visible
|
||||
if(elements[i].style.opacity === "0") {
|
||||
continue
|
||||
}
|
||||
if(elements[i].style.display === "none" || elements[i].style.pointerEvents === "none" ) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Skip really small ones, and where width or height ==0
|
||||
if (bbox['width'] * bbox['height'] < 100) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Don't include elements that are offset from canvas
|
||||
if (bbox['top']+scroll_y < 0 || bbox['left'] < 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -109,19 +141,20 @@ for (var i = 0; i < elements.length; i++) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// @todo Possible to ONLY list where it's clickable to save JSON xfer size
|
||||
size_pos.push({
|
||||
xpath: xpath_result,
|
||||
width: Math.round(bbox['width']),
|
||||
height: Math.round(bbox['height']),
|
||||
left: Math.floor(bbox['left']),
|
||||
top: Math.floor(bbox['top']),
|
||||
top: Math.floor(bbox['top'])+scroll_y,
|
||||
tagName: (elements[i].tagName) ? elements[i].tagName.toLowerCase() : '',
|
||||
tagtype: (elements[i].tagName == 'INPUT' && elements[i].type) ? elements[i].type.toLowerCase() : ''
|
||||
tagtype: (elements[i].tagName == 'INPUT' && elements[i].type) ? elements[i].type.toLowerCase() : '',
|
||||
isClickable: (elements[i].onclick) || window.getComputedStyle(elements[i]).cursor == "pointer"
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
|
||||
// Inject the current one set in the include_filters, which may be a CSS rule
|
||||
// used for displaying the current one in VisualSelector, where its not one we generated.
|
||||
if (include_filters.length) {
|
||||
@@ -149,22 +182,40 @@ if (include_filters.length) {
|
||||
}
|
||||
|
||||
if (q) {
|
||||
bbox = q.getBoundingClientRect();
|
||||
} else {
|
||||
console.log("xpath_element_scraper: filter element "+f+" was not found");
|
||||
// #1231 - IN the case XPath attribute filter is applied, we will have to traverse up and find the element.
|
||||
if (q.hasOwnProperty('getBoundingClientRect')) {
|
||||
bbox = q.getBoundingClientRect();
|
||||
console.log("xpath_element_scraper: Got filter element, scroll from top was " + scroll_y)
|
||||
} else {
|
||||
try {
|
||||
// Try and see we can find its ownerElement
|
||||
bbox = q.ownerElement.getBoundingClientRect();
|
||||
console.log("xpath_element_scraper: Got filter by ownerElement element, scroll from top was " + scroll_y)
|
||||
} catch (e) {
|
||||
console.log("xpath_element_scraper: error looking up ownerElement")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(!q) {
|
||||
console.log("xpath_element_scraper: filter element " + f + " was not found");
|
||||
}
|
||||
|
||||
if (bbox && bbox['width'] > 0 && bbox['height'] > 0) {
|
||||
size_pos.push({
|
||||
xpath: f,
|
||||
width: Math.round(bbox['width']),
|
||||
height: Math.round(bbox['height']),
|
||||
left: Math.floor(bbox['left']),
|
||||
top: Math.floor(bbox['top'])
|
||||
width: parseInt(bbox['width']),
|
||||
height: parseInt(bbox['height']),
|
||||
left: parseInt(bbox['left']),
|
||||
top: parseInt(bbox['top'])+scroll_y
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort the elements so we find the smallest one first, in other words, we find the smallest one matching in that area
|
||||
// so that we dont select the wrapping element by mistake and be unable to select what we want
|
||||
size_pos.sort((a, b) => (a.width*a.height > b.width*b.height) ? 1 : -1)
|
||||
|
||||
// Window.width required for proper scaling in the frontend
|
||||
return {'size_pos': size_pos, 'browser_width': window.innerWidth};
|
||||
|
||||
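The ascending sort by area is what lets the Visual Selector pick the innermost matching element rather than a large wrapper. The same idea in a couple of lines of Python (the entries are illustrative):

# Each entry mirrors what the scraper pushes into size_pos
size_pos = [
    {'xpath': '//div[@id="page"]', 'width': 1200, 'height': 900},
    {'xpath': '//span[@class="price"]', 'width': 120, 'height': 30},
]
# Smallest area first, so the tightest-fitting element wins when boxes overlap
size_pos.sort(key=lambda e: e['width'] * e['height'])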
@@ -1,104 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
|
||||
# live_server will throw errors even with live_server_scope=function if I have the live_server setup in different functions
|
||||
# and I like to restart the server for each test (and have the test cleanup after each test)
|
||||
# merge request welcome :)
|
||||
|
||||
|
||||
# exit when any command fails
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
|
||||
|
||||
find tests/test_*py -type f|while read test_name
|
||||
do
|
||||
echo "TEST RUNNING $test_name"
|
||||
pytest $test_name
|
||||
done
|
||||
|
||||
echo "RUNNING WITH BASE_URL SET"
|
||||
|
||||
# Now re-run some tests with BASE_URL enabled
|
||||
# Re #65 - Ability to include a link back to the installation, in the notification.
|
||||
export BASE_URL="https://really-unique-domain.io"
|
||||
pytest tests/test_notification.py
|
||||
|
||||
|
||||
# Re-run with HIDE_REFERER set - could affect login
|
||||
export HIDE_REFERER=True
|
||||
pytest tests/test_access_control.py
|
||||
|
||||
|
||||
# Now for the selenium and playwright/browserless fetchers
|
||||
# Note - this is not UI functional tests - just checking that each one can fetch the content
|
||||
|
||||
echo "TESTING WEBDRIVER FETCH > SELENIUM/WEBDRIVER..."
|
||||
docker run -d --name $$-test_selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome-debug:3.141.59
|
||||
# takes a while to spin up
|
||||
sleep 5
|
||||
export WEBDRIVER_URL=http://localhost:4444/wd/hub
|
||||
pytest tests/fetchers/test_content.py
|
||||
pytest tests/test_errorhandling.py
|
||||
unset WEBDRIVER_URL
|
||||
docker kill $$-test_selenium
|
||||
|
||||
echo "TESTING WEBDRIVER FETCH > PLAYWRIGHT/BROWSERLESS..."
|
||||
# Not all platforms support playwright (not ARM/rPI), so it's not packaged in requirements.txt
|
||||
PLAYWRIGHT_VERSION=$(grep -i -E "RUN pip install.+" "$SCRIPT_DIR/../Dockerfile" | grep --only-matching -i -E "playwright[=><~+]+[0-9\.]+")
|
||||
echo "using $PLAYWRIGHT_VERSION"
|
||||
pip3 install "$PLAYWRIGHT_VERSION"
|
||||
docker run -d --name $$-test_browserless -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.53-chrome-stable
|
||||
# takes a while to spin up
|
||||
sleep 5
|
||||
export PLAYWRIGHT_DRIVER_URL=ws://127.0.0.1:3000
|
||||
pytest tests/fetchers/test_content.py
|
||||
pytest tests/test_errorhandling.py
|
||||
pytest tests/visualselector/test_fetch_data.py
|
||||
|
||||
unset PLAYWRIGHT_DRIVER_URL
|
||||
docker kill $$-test_browserless
|
||||
|
||||
# Test proxy list handling, starting two squids on different ports
|
||||
# Each squid adds a different header to the response, which is the main thing we test for.
|
||||
docker run -d --name $$-squid-one --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf -p 3128:3128 ubuntu/squid:4.13-21.10_edge
|
||||
docker run -d --name $$-squid-two --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf -p 3129:3128 ubuntu/squid:4.13-21.10_edge
|
||||
|
||||
|
||||
# So, basic HTTP as env var test
|
||||
export HTTP_PROXY=http://localhost:3128
|
||||
export HTTPS_PROXY=http://localhost:3128
|
||||
pytest tests/proxy_list/test_proxy.py
|
||||
docker logs $$-squid-one 2>/dev/null|grep one.changedetection.io
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "Did not see a request to one.changedetection.io in the squid logs (while checking env vars HTTP_PROXY/HTTPS_PROXY)"
|
||||
fi
|
||||
unset HTTP_PROXY
|
||||
unset HTTPS_PROXY
|
||||
|
||||
|
||||
# 2nd test actually choose the preferred proxy from proxies.json
|
||||
cp tests/proxy_list/proxies.json-example ./test-datastore/proxies.json
|
||||
# Makes a watch use a preferred proxy
|
||||
pytest tests/proxy_list/test_multiple_proxy.py
|
||||
|
||||
# Should be a request in the default "first" squid
|
||||
docker logs $$-squid-one 2>/dev/null|grep chosen.changedetection.io
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "Did not see a request to chosen.changedetection.io in the squid logs (while checking preferred proxy)"
|
||||
fi
|
||||
|
||||
# And one in the 'second' squid (user selects this as preferred)
|
||||
docker logs $$-squid-two 2>/dev/null|grep chosen.changedetection.io
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "Did not see a request to chosen.changedetection.io in the squid logs (while checking preferred proxy)"
|
||||
fi
|
||||
|
||||
# @todo - test system override proxy selection and watch defaults, setup a 3rd squid?
|
||||
docker kill $$-squid-one
|
||||
docker kill $$-squid-two
|
||||
|
||||
|
||||
38
changedetectionio/run_basic_tests.sh
Executable file
@@ -0,0 +1,38 @@
|
||||
#!/bin/bash
|
||||
|
||||
|
||||
# live_server will throw errors even with live_server_scope=function if I have the live_server setup in different functions
|
||||
# and I like to restart the server for each test (and have the test cleanup after each test)
|
||||
# merge request welcome :)
|
||||
|
||||
|
||||
# exit when any command fails
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
|
||||
|
||||
find tests/test_*py -type f|while read test_name
|
||||
do
|
||||
echo "TEST RUNNING $test_name"
|
||||
pytest $test_name
|
||||
done
|
||||
|
||||
echo "RUNNING WITH BASE_URL SET"
|
||||
|
||||
# Now re-run some tests with BASE_URL enabled
|
||||
# Re #65 - Ability to include a link back to the installation, in the notification.
|
||||
export BASE_URL="https://really-unique-domain.io"
|
||||
pytest tests/test_notification.py
|
||||
|
||||
|
||||
# Re-run with HIDE_REFERER set - could affect login
|
||||
export HIDE_REFERER=True
|
||||
pytest tests/test_access_control.py
|
||||
|
||||
# Re-run a few tests that will trigger brotli based storage
|
||||
export SNAPSHOT_BROTLI_COMPRESSION_THRESHOLD=5
|
||||
pytest tests/test_access_control.py
|
||||
pytest tests/test_notification.py
|
||||
pytest tests/test_backend.py
|
||||
pytest tests/test_rss.py
|
||||
pytest tests/test_unique_lines.py
|
||||
116
changedetectionio/run_proxy_tests.sh
Executable file
@@ -0,0 +1,116 @@
|
||||
#!/bin/bash
|
||||
|
||||
# exit when any command fails
|
||||
set -e
|
||||
# enable debug
|
||||
set -x
|
||||
|
||||
# Test proxy list handling, starting two squids on different ports
|
||||
# Each squid adds a different header to the response, which is the main thing we test for.
|
||||
docker run --network changedet-network -d --name squid-one --hostname squid-one --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf ubuntu/squid:4.13-21.10_edge
|
||||
docker run --network changedet-network -d --name squid-two --hostname squid-two --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf ubuntu/squid:4.13-21.10_edge
|
||||
|
||||
# SOCKS5 related - start simple Socks5 proxy server
|
||||
# SOCKSTEST=xyz should show in the logs of this service to confirm it fetched
|
||||
docker run --network changedet-network -d --hostname socks5proxy --name socks5proxy -p 1080:1080 -e PROXY_USER=proxy_user123 -e PROXY_PASSWORD=proxy_pass123 serjs/go-socks5-proxy
|
||||
docker run --network changedet-network -d --hostname socks5proxy-noauth -p 1081:1080 --name socks5proxy-noauth serjs/go-socks5-proxy
|
||||
|
||||
echo "---------------------------------- SOCKS5 -------------------"
|
||||
# SOCKS5 related - test from proxies.json
|
||||
docker run --network changedet-network \
|
||||
-v `pwd`/tests/proxy_socks5/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json \
|
||||
--rm \
|
||||
-e "SOCKSTEST=proxiesjson" \
|
||||
test-changedetectionio \
|
||||
bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py'
|
||||
|
||||
# SOCKS5 related - by manually entering in UI
|
||||
docker run --network changedet-network \
|
||||
--rm \
|
||||
-e "SOCKSTEST=manual" \
|
||||
test-changedetectionio \
|
||||
bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy.py'
|
||||
|
||||
# SOCKS5 related - test from proxies.json via playwright - NOTE- PLAYWRIGHT DOESNT SUPPORT AUTHENTICATING PROXY
|
||||
docker run --network changedet-network \
|
||||
-e "SOCKSTEST=manual-playwright" \
|
||||
-v `pwd`/tests/proxy_socks5/proxies.json-example-noauth:/app/changedetectionio/test-datastore/proxies.json \
|
||||
-e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" \
|
||||
--rm \
|
||||
test-changedetectionio \
|
||||
bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py'
|
||||
|
||||
echo "socks5 server logs"
|
||||
docker logs socks5proxy
|
||||
echo "----------------------------------"
|
||||
|
||||
# Used for configuring a custom proxy URL via the UI
|
||||
docker run --network changedet-network -d \
|
||||
--name squid-custom \
|
||||
--hostname squid-custom \
|
||||
--rm \
|
||||
-v `pwd`/tests/proxy_list/squid-auth.conf:/etc/squid/conf.d/debian.conf \
|
||||
-v `pwd`/tests/proxy_list/squid-passwords.txt:/etc/squid3/passwords \
|
||||
ubuntu/squid:4.13-21.10_edge
|
||||
|
||||
|
||||
## 2nd test actually choose the preferred proxy from proxies.json
|
||||
docker run --network changedet-network \
|
||||
-v `pwd`/tests/proxy_list/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json \
|
||||
test-changedetectionio \
|
||||
bash -c 'cd changedetectionio && pytest tests/proxy_list/test_multiple_proxy.py'
|
||||
|
||||
|
||||
## Should be a request in the default "first" squid
|
||||
docker logs squid-one 2>/dev/null|grep chosen.changedetection.io
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "Did not see a request to chosen.changedetection.io in the squid logs (while checking preferred proxy - squid one)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# And one in the 'second' squid (user selects this as preferred)
|
||||
docker logs squid-two 2>/dev/null|grep chosen.changedetection.io
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "Did not see a request to chosen.changedetection.io in the squid logs (while checking preferred proxy - squid two)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
# Test the UI configurable proxies
|
||||
docker run --network changedet-network \
|
||||
test-changedetectionio \
|
||||
bash -c 'cd changedetectionio && pytest tests/proxy_list/test_select_custom_proxy.py'
|
||||
|
||||
|
||||
# Should see a request for one.changedetection.io in there
|
||||
docker logs squid-custom 2>/dev/null|grep "TCP_TUNNEL.200.*changedetection.io"
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "Did not see a valid request to changedetection.io in the squid logs (while checking preferred proxy - squid two)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Test "no-proxy" option
|
||||
docker run --network changedet-network \
|
||||
test-changedetectionio \
|
||||
bash -c 'cd changedetectionio && pytest tests/proxy_list/test_noproxy.py'
|
||||
|
||||
# We need to handle grep returning 1
|
||||
set +e
|
||||
# Check request was never seen in any container
|
||||
for c in $(echo "squid-one squid-two squid-custom"); do
|
||||
echo Checking $c
|
||||
docker logs $c &> $c.txt
|
||||
grep noproxy $c.txt
|
||||
if [ $? -ne 1 ]
|
||||
then
|
||||
echo "Saw request for noproxy in $c container"
|
||||
cat $c.txt
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
|
||||
docker kill squid-one squid-two squid-custom
|
||||
BIN
changedetectionio/static/favicons/android-chrome-192x192.png
Normal file
|
After Width: | Height: | Size: 33 KiB |
BIN
changedetectionio/static/favicons/android-chrome-256x256.png
Normal file
|
After Width: | Height: | Size: 40 KiB |
BIN
changedetectionio/static/favicons/apple-touch-icon.png
Normal file
|
After Width: | Height: | Size: 31 KiB |
9
changedetectionio/static/favicons/browserconfig.xml
Normal file
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<browserconfig>
<msapplication>
<tile>
<square150x150logo src="favicons/mstile-150x150.png"/>
<TileColor>#da532c</TileColor>
</tile>
</msapplication>
</browserconfig>
BIN
changedetectionio/static/favicons/favicon-16x16.png
Normal file
|
After Width: | Height: | Size: 13 KiB |
BIN
changedetectionio/static/favicons/favicon-32x32.png
Normal file
|
After Width: | Height: | Size: 14 KiB |
BIN
changedetectionio/static/favicons/favicon.ico
Normal file
|
After Width: | Height: | Size: 12 KiB |
BIN
changedetectionio/static/favicons/mstile-150x150.png
Normal file
|
After Width: | Height: | Size: 15 KiB |
35
changedetectionio/static/favicons/safari-pinned-tab.svg
Normal file
@@ -0,0 +1,35 @@
|
||||
<?xml version="1.0" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
|
||||
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
|
||||
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
|
||||
width="256.000000pt" height="256.000000pt" viewBox="0 0 256.000000 256.000000"
|
||||
preserveAspectRatio="xMidYMid meet">
|
||||
<metadata>
|
||||
Created by potrace 1.14, written by Peter Selinger 2001-2017
|
||||
</metadata>
|
||||
<g transform="translate(0.000000,256.000000) scale(0.100000,-0.100000)"
|
||||
fill="#000000" stroke="none">
|
||||
<path d="M0 1280 l0 -1280 1280 0 1280 0 0 1280 0 1280 -1280 0 -1280 0 0
|
||||
-1280z m1555 936 c387 -112 675 -426 741 -810 24 -138 15 -352 -20 -470 -106
|
||||
-353 -360 -606 -713 -712 -75 -22 -113 -27 -253 -31 -144 -5 -176 -2 -252 16
|
||||
-316 75 -564 271 -707 557 -67 136 -92 237 -98 401 -7 164 5 253 47 378 106
|
||||
315 349 556 665 659 114 37 180 45 350 41 125 -2 165 -7 240 -29z"/>
|
||||
<path d="M1091 2165 c-364 -82 -629 -328 -738 -682 -24 -80 -27 -103 -27 -258
|
||||
-1 -146 2 -182 21 -251 74 -271 259 -497 508 -621 477 -238 1061 -35 1294 450
|
||||
61 126 83 220 88 379 7 194 -15 307 -93 461 -126 251 -340 428 -614 507 -99
|
||||
29 -343 37 -439 15z m829 -473 c55 -54 100 -106 100 -116 0 -21 -184 -213
|
||||
-212 -222 -24 -7 -48 12 -48 38 0 11 26 47 58 80 l57 60 -151 -3 c-145 -4
|
||||
-152 -5 -190 -31 -22 -15 -78 -73 -124 -128 l-85 -99 -32 31 -32 31 30 38 c17
|
||||
22 70 79 117 128 66 67 97 92 127 100 22 6 106 11 188 11 81 0 147 3 147 8 0
|
||||
4 -25 31 -55 61 -55 55 -65 77 -43 99 25 25 50 10 148 -86z m-1002 -101 c46
|
||||
-24 141 -121 312 -321 203 -236 290 -330 322 -346 22 -11 60 -14 169 -12 l141
|
||||
3 -51 58 c-28 32 -51 64 -51 71 0 18 21 36 43 36 24 0 217 -193 217 -217 0
|
||||
-19 -185 -210 -212 -219 -24 -7 -48 12 -48 38 0 10 23 43 50 72 l50 53 -52 7
|
||||
c-29 3 -93 6 -142 6 -104 0 -152 12 -200 52 -19 15 -135 144 -258 286 -274
|
||||
316 -305 347 -354 361 -22 6 -94 11 -161 11 -67 0 -128 3 -137 6 -22 9 -21 61
|
||||
2 67 9 3 86 5 170 6 133 1 158 -2 190 -18z m227 -468 c23 -34 17 -43 -103
|
||||
-172 -119 -128 -131 -133 -343 -129 l-154 3 0 35 c0 34 1 35 50 42 28 3 96 7
|
||||
153 7 64 1 115 6 136 15 20 8 71 56 127 120 52 58 99 106 105 106 7 0 20 -12
|
||||
29 -27z"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.0 KiB |
19
changedetectionio/static/favicons/site.webmanifest
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"name": "",
|
||||
"short_name": "",
|
||||
"icons": [
|
||||
{
|
||||
"src": "android-chrome-192x192.png",
|
||||
"sizes": "192x192",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "android-chrome-256x256.png",
|
||||
"sizes": "256x256",
|
||||
"type": "image/png"
|
||||
}
|
||||
],
|
||||
"theme_color": "#ffffff",
|
||||
"background_color": "#ffffff",
|
||||
"display": "standalone"
|
||||
}
|
||||
@@ -1,42 +1,4 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="15"
|
||||
height="16.363636"
|
||||
viewBox="0 0 15 16.363636"
|
||||
version="1.1"
|
||||
id="svg4"
|
||||
sodipodi:docname="bell-off.svg"
|
||||
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview5"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
showgrid="false"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0"
|
||||
inkscape:zoom="28.416667"
|
||||
inkscape:cx="-0.59824046"
|
||||
inkscape:cy="12"
|
||||
inkscape:window-width="1554"
|
||||
inkscape:window-height="896"
|
||||
inkscape:window-x="2095"
|
||||
inkscape:window-y="107"
|
||||
inkscape:window-maximized="0"
|
||||
inkscape:current-layer="svg4" />
|
||||
<defs
|
||||
id="defs8" />
|
||||
<path
|
||||
d="m 14.318182,11.762045 v 1.1925 H 5.4102273 L 11.849318,7.1140909 C 12.234545,9.1561364 12.54,11.181818 14.318182,11.762045 Z m -6.7984093,4.601591 c 1.0759091,0 2.0256823,-0.955909 2.0256823,-2.045454 H 5.4545455 c 0,1.089545 0.9879545,2.045454 2.0652272,2.045454 z M 15,2.8622727 0.9177273,15.636136 0,14.627045 l 1.8443182,-1.6725 h -1.1625 v -1.1925 C 4.0070455,10.677273 2.1784091,4.5388636 5.3611364,2.6897727 5.8009091,2.4347727 6.0709091,1.9609091 6.0702273,1.4488636 v -0.00205 C 6.0702273,0.64772727 6.7104545,0 7.5,0 8.2895455,0 8.9297727,0.64772727 8.9297727,1.4468182 v 0.00205 C 8.9290909,1.9602319 9.199773,2.4354591 9.638864,2.6897773 10.364318,3.111141 10.827273,3.7568228 11.1525,4.5129591 L 14.085682,1.8531818 Z M 6.8181818,1.3636364 C 6.8181818,1.74 7.1236364,2.0454545 7.5,2.0454545 7.8763636,2.0454545 8.1818182,1.74 8.1818182,1.3636364 8.1818182,0.98795455 7.8763636,0.68181818 7.5,0.68181818 c -0.3763636,0 -0.6818182,0.30613637 -0.6818182,0.68181822 z"
|
||||
id="path2"
|
||||
style="fill:#f8321b;stroke-width:0.681818;fill-opacity:1" />
|
||||
<svg width="15" height="16.363636" viewBox="0 0 15 16.363636" xmlns="http://www.w3.org/2000/svg" xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<path d="m 14.318182,11.762045 v 1.1925 H 5.4102273 L 11.849318,7.1140909 C 12.234545,9.1561364 12.54,11.181818 14.318182,11.762045 Z m -6.7984093,4.601591 c 1.0759091,0 2.0256823,-0.955909 2.0256823,-2.045454 H 5.4545455 c 0,1.089545 0.9879545,2.045454 2.0652272,2.045454 z M 15,2.8622727 0.9177273,15.636136 0,14.627045 l 1.8443182,-1.6725 h -1.1625 v -1.1925 C 4.0070455,10.677273 2.1784091,4.5388636 5.3611364,2.6897727 5.8009091,2.4347727 6.0709091,1.9609091 6.0702273,1.4488636 v -0.00205 C 6.0702273,0.64772727 6.7104545,0 7.5,0 8.2895455,0 8.9297727,0.64772727 8.9297727,1.4468182 v 0.00205 C 8.9290909,1.9602319 9.199773,2.4354591 9.638864,2.6897773 10.364318,3.111141 10.827273,3.7568228 11.1525,4.5129591 L 14.085682,1.8531818 Z M 6.8181818,1.3636364 C 6.8181818,1.74 7.1236364,2.0454545 7.5,2.0454545 7.8763636,2.0454545 8.1818182,1.74 8.1818182,1.3636364 8.1818182,0.98795455 7.8763636,0.68181818 7.5,0.68181818 c -0.3763636,0 -0.6818182,0.30613637 -0.6818182,0.68181822 z" id="path2" style="fill:#f8321b;stroke-width:0.681818;fill-opacity:1"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 2.1 KiB After Width: | Height: | Size: 1.2 KiB |
58
changedetectionio/static/images/brightdata.svg
Normal file
@@ -0,0 +1,58 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
height="59.553207"
|
||||
viewBox="-0.36 95.21 25.082135 59.553208"
|
||||
width="249.99138"
|
||||
version="1.1"
|
||||
id="svg12"
|
||||
sodipodi:docname="brightdata.svg"
|
||||
inkscape:version="1.1.2 (0a00cf5339, 2022-02-04)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<defs
|
||||
id="defs16" />
|
||||
<sodipodi:namedview
|
||||
id="namedview14"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
showgrid="false"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0"
|
||||
inkscape:zoom="0.9464"
|
||||
inkscape:cx="22.189349"
|
||||
inkscape:cy="-90.870668"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1051"
|
||||
inkscape:window-x="1920"
|
||||
inkscape:window-y="0"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg12" />
|
||||
<path
|
||||
d="m -34.416031,129.28 c -3.97,-2.43 -5.1,-6.09 -4.32,-10.35 0.81,-4.4 3.95,-6.75 8.04,-7.75 4.23,-1.04 8.44,-0.86 12.3,1.5 0.63,0.39 0.93,0.03 1.31,-0.29 1.5,-1.26 3.27,-1.72 5.189999,-1.83 0.79,-0.05 1.04,0.24 1.01,1.01 -0.05,1.31 -0.04,2.63 0,3.95 0.02,0.65 -0.19,0.93 -0.87,0.89 -0.889999,-0.04 -1.789999,0.03 -2.669999,-0.02 -0.82,-0.04 -1.08,0.1 -0.88,1.04 0.83,3.9 -0.06,7.37 -3.1,10.06 -2.76,2.44 -6.13,3.15 -9.72,3.04 -0.51,-0.02 -1.03,-0.02 -1.52,-0.13 -1.22,-0.25 -1.96,0.14 -2.19,1.41 -0.28,1.54 0.16,2.62 1.37,3.07 0.84,0.31 1.74,0.35 2.63,0.39 2.97,0.13 5.95,-0.18 8.91,0.21 2.93,0.39 5.69,1.16 6.85,4.25 1.269999,3.38 0.809999,6.62 -1.48,9.47 -2.73,3.39 -6.52,4.78 -10.66,5.33 -3.53,0.48 -7.04,0.27 -10.39,-1.11 -3.89,-1.6 -5.75,-4.95 -4.84,-8.72 0.51,-2.11 1.85,-3.58 3.69,-4.65 0.38,-0.22 0.93,-0.32 0.28,-0.96 -2.91,-2.83 -2.85,-6.16 0.1,-8.95 0.28,-0.26 0.6,-0.53 0.96,-0.86 z m 8.07,21.5 c 0.95,0.04 1.87,-0.13 2.78,-0.33 1.89,-0.42 3.51,-1.3 4.49,-3.06 1.82,-3.25 0.24,-6.2 -3.37,-6.58 -2.88,-0.3 -5.76,0.24 -8.63,-0.13 -0.53,-0.07 -0.75,0.34 -0.95,0.71 -1.16,2.24 -1.08,4.53 0,6.73 1.15,2.34 3.46,2.48 5.68,2.66 z m -5,-30.61 c -0.03,1.67 0.08,3.19 0.74,4.61 0.76,1.62 2.17,2.42 4.03,2.31 1.62,-0.1 2.9,-1.12 3.36,-2.84 0.66,-2.46 0.69,-4.95 0.01,-7.42 -0.49,-1.76 -1.7,-2.64 -3.56,-2.7 -2.08,-0.07 -3.37,0.7 -4.04,2.42 -0.47,1.21 -0.6,2.47 -0.54,3.62 z m 32.9399993,6.56 c 0,2.59 0.05,5.18 -0.02,7.77 -0.03,1.03 0.31,1.46 1.32,1.52 0.65,0.04 1.61,-0.09 1.82,0.57 0.26,0.81 0.11,1.76 0.06,2.65 -0.03,0.48 -0.81,0.39 -0.81,0.39 l -11.47,0.01 c 0,0 -0.95,-0.21 -0.88,-0.88 0.03,-0.29 0.04,-0.6 0,-0.89 -0.19,-1.24 0.21,-1.92 1.58,-1.9 0.99,0.01 1.28,-0.52 1.28,-1.53 -0.05,-8.75 -0.05,-17.49 0,-26.24 0.01,-1.15 -0.36,-1.62 -1.44,-1.67 -0.17,-0.01 -0.34,-0.04 -0.5,-0.07 -1.43,-0.22 -2.12,-1.57 -1.53,-2.91 0.15,-0.35 0.43,-0.36 0.72,-0.4 2.94,-0.41 5.88,-0.81 8.82000002,-1.23 0.81999998,-0.12 0.99999998,0.27 0.98999998,1.01 -0.02,3.35 0,6.71 0.02,10.06 0,0.35 -0.23,0.84 0.18,1.03 0.38,0.17 0.69,-0.25 0.99,-0.45 2.56,-1.74 5.33,-2.73 8.4900007,-2.56 3.51005,0.19 5.65005,1.95 6.35005,5.46 0.42,2.09 0.52,4.21 0.51,6.33 -0.02,3.86 0.05,7.73 -0.04,11.59 -0.02,1.12 0.37,1.5 1.39,1.6 0.61,0.05 1.55,-0.13 1.74,0.47 0.26,0.85 0.12,1.84 0.1,2.77 -0.01,0.41 -0.69,0.37 -0.69,0.37 l -11.4700504,0.01 c 0,0 -0.81,-0.29 -0.8,-0.85 0.01,-0.38 0.04,-0.77 -0.01,-1.15 -0.13,-1.01 0.32,-1.52 1.31,-1.56 1.0600004,-0.05 1.3800004,-0.55 1.3500004,-1.63 -0.14,-4.84 0.16,-9.68 -0.18,-14.51 -0.26,-3.66 -2.1100004,-4.95 -5.6700007,-3.99 -0.25,0.07 -0.49,0.15 -0.73,0.22 -2.57,0.8 -2.79,1.09 -2.79,3.71 0.01,2.3 0.01,4.59 0.01,6.88 z M -109.26603,122.56 c 0,-4.75 -0.02,-9.51 0.02,-14.26 0.01,-0.92 -0.17,-1.47 -1.19,-1.45 -0.16,0 -0.33,-0.07 -0.5,-0.1 -1.56,-0.27 -2.24,-1.47 -1.69,-2.92 0.14,-0.37 0.41,-0.38 0.7,-0.42 2.98,-0.41 5.97,-0.81 8.94,-1.24 0.85,-0.12 0.88,0.33 0.88,0.96 -0.01,3.01 -0.01,6.03 0,9.04 0,0.4 -0.18,0.96 0.27,1.16 0.36,0.16 0.66,-0.3 0.96,-0.52 4.729999,-3.51 12.459999,-2.61 14.889999,4.48 1.89,5.51 1.91,11.06 -0.96,16.28 -2.37,4.31 -6.19,6.49 -11.15,6.59 -3.379999,0.07 -6.679999,-0.3 -9.909999,-1.37 -0.93,-0.31 -1.3,-0.78 -1.28,-1.83 0.05,-4.81 0.02,-9.6 0.02,-14.4 z m 7.15,3.89 c 0,2.76 0.02,5.52 -0.01,8.28 -0.01,0.76 0.18,1.29 0.91,1.64 1.899999,0.9 4.299999,0.5 5.759999,-1.01 0.97,-1 1.56,-2.21 1.96,-3.52 1.03,-3.36 0.97,-6.78 0.61,-10.22 a 9.991,9.991 0 0 0 -0.93,-3.29 c -1.47,-3.06 -4.67,-3.85 -7.439999,-1.86 -0.6,0.43 -0.88,0.93 -0.87,1.7 0.04,2.76 0.01,5.52 0.01,8.28 z"
|
||||
fill="#4280f6"
|
||||
id="path2" />
|
||||
<path
|
||||
d="m 68.644019,137.2 c -1.62,1.46 -3.41,2.56 -5.62,2.96 -4.4,0.8 -8.7,-1.39 -10.49,-5.49 -2.31,-5.31 -2.3,-10.67 -0.1,-15.98 2.31,-5.58 8.29,-8.65 14.24,-7.46 1.71,0.34 1.9,0.18 1.9,-1.55 0,-0.68 -0.05,-1.36 0.01,-2.04 0.09,-1.02 -0.25,-1.54 -1.34,-1.43 -0.64,0.06 -1.26,-0.1 -1.88,-0.21 -1.32,-0.24 -1.6,-0.62 -1.37,-1.97 0.07,-0.41 0.25,-0.57 0.65,-0.62 2.63,-0.33 5.27,-0.66 7.9,-1.02 1.04,-0.14 1.17,0.37 1.17,1.25 -0.02,10.23 -0.02,20.45 -0.01,30.68 v 1.02 c 0.02,0.99 0.35,1.6 1.52,1.47 0.52,-0.06 1.35,-0.27 1.25,0.73 -0.08,0.8 0.58,1.93 -0.94,2.18 -1.29,0.22 -2.51,0.69 -3.86,0.65 -2.04,-0.06 -2.3,-0.23 -2.76,-2.19 -0.09,-0.3 0.06,-0.67 -0.27,-0.98 z m -0.07,-12.46 c 0,-2.8 -0.04,-5.6 0.02,-8.39 0.02,-0.9 -0.28,-1.47 -1.05,-1.81 -3.18,-1.4 -7.54,-0.8 -9.3,2.87 -0.83,1.74 -1.31,3.54 -1.49,5.46 -0.28,2.93 -0.38,5.83 0.61,8.65 0.73,2.09 1.81,3.9 4.11,4.67 2.49,0.83 4.55,-0.04 6.5,-1.48 0.54,-0.4 0.62,-0.95 0.61,-1.57 -0.02,-2.8 -0.01,-5.6 -0.01,-8.4 z m 28.79,2.53 c 0,3.24 0.04,5.83 -0.02,8.41 -0.02,1 0.19,1.49 1.309998,1.41 0.55,-0.04 1.460003,-0.46 1.520003,0.73 0.05,1.02 0.1,1.89 -1.330003,2.08 -1.289998,0.17 -2.559998,0.51 -3.889998,0.48 -1.88,-0.05 -2.15,-0.26 -2.42,-2.15 -0.04,-0.27 0.14,-0.65 -0.22,-0.79 -0.34,-0.13 -0.5,0.24 -0.72,0.42 -3.61,3 -8.15,3.4 -11.64,1.08 -1.61,-1.07 -2.49,-2.63 -2.67,-4.43 -0.51,-5.13 0.77,-7.91 6.3,-10.22 2.44,-1.02 5.07,-1.27 7.68,-1.49 0.77,-0.07 1.03,-0.28 1.02,-1.05 -0.03,-1.48 -0.05,-2.94 -0.64,-4.36 -0.59,-1.42 -1.67,-1.92 -3.08,-2.03 -3.04,-0.24 -5.88,0.5 -8.63,1.71 -0.51,0.23 -1.19,0.75 -1.48,-0.13 -0.26,-0.77 -1.35,-1.61 0.05,-2.47 3.27,-2 6.7,-3.44 10.61,-3.42 1.44,0.01 2.88,0.27 4.21,0.81 2.67,1.08 3.44,3.4 3.8,5.99 0.46,3.37 0.1,6.73 0.24,9.42 z m -5.09,2.9 c 0,-1.23 -0.01,-2.46 0,-3.69 0,-0.52 -0.06,-0.98 -0.75,-0.84 -1.45,0.3 -2.93,0.28 -4.37,0.69 -3.71,1.04 -5.46,4.48 -3.97,8.03 0.51,1.22 1.48,1.98 2.79,2.16 2.01,0.28 3.86,-0.29 5.6,-1.28 0.54,-0.31 0.73,-0.76 0.72,-1.37 -0.05,-1.23 -0.02,-2.47 -0.02,-3.7 z m 43.060001,-2.89 c 0,2.72 0.01,5.43 -0.01,8.15 0,0.66 0.02,1.21 0.91,1.12 0.54,-0.06 0.99,0.12 0.86,0.75 -0.15,0.71 0.56,1.7 -0.58,2.09 -1.55,0.52 -3.16,0.59 -4.77,0.4 -0.99,-0.12 -1.12,-1.01 -1.18,-1.73 -0.08,-1.15 -0.16,-1.45 -1.24,-0.54 -3.41,2.87 -8.05,3.17 -11.43,0.88 -1.75,-1.18 -2.49,-2.91 -2.7,-4.94 -0.64,-6.24 3.16,-8.74 7.83,-10.17 2.04,-0.62 4.14,-0.8 6.24,-0.99 0.81,-0.07 1,-0.36 0.98,-1.09 -0.04,-1.31 0.04,-2.62 -0.42,-3.89 -0.57,-1.57 -1.53,-2.34 -3.18,-2.45 -3.03,-0.21 -5.88,0.46 -8.64,1.66 -0.6,0.26 -1.25,0.81 -1.68,-0.2 -0.34,-0.8 -1.08,-1.61 0.16,-2.36 4.12,-2.5 8.44,-4.16 13.36,-3.07 3.21,0.71 4.89,2.91 5.26,6.34 0.18,1.69 0.22,3.37 0.22,5.07 0.01,1.66 0.01,3.32 0.01,4.97 z m -5.09,2.54 c 0,-1.27 -0.03,-2.54 0.01,-3.81 0.02,-0.74 -0.27,-1.02 -0.98,-0.92 -1.21,0.17 -2.43,0.28 -3.62,0.55 -3.72,0.83 -5.47,3.48 -4.82,7.21 0.29,1.66 1.57,2.94 3.21,3.16 2.02,0.27 3.85,-0.34 5.57,-1.34 0.49,-0.29 0.64,-0.73 0.63,-1.29 -0.02,-1.18 0,-2.37 0,-3.56 z"
|
||||
fill="#c8dbfb"
|
||||
id="path4" />
|
||||
<path
|
||||
d="m 26.314019,125.77 c 0,-2.89 -0.05,-5.77 0.02,-8.66 0.03,-1.04 -0.33,-1.39 -1.31,-1.24 a 0.7,0.7 0 0 1 -0.25,0 c -0.57,-0.18 -1.44,0.48 -1.68,-0.58 -0.35,-1.48 -0.02,-2.3 1.21,-2.7 1.3,-0.43 2.16,-1.26 2.76,-2.46 0.78,-1.56 1.44,-3.17 1.91,-4.84 0.18,-0.63 0.47,-0.86 1.15,-0.88 3.28,-0.09 3.27,-0.11 3.32,3.17 0.01,1.06 0.09,2.12 0.09,3.18 -0.01,0.67 0.27,0.89 0.91,0.88 1.61,-0.02 3.23,0.03 4.84,-0.02 0.77,-0.02 1.01,0.23 1.03,1.01 0.08,3.27 0.1,3.27 -3.09,3.27 -0.93,0 -1.87,0.03 -2.8,-0.01 -0.67,-0.02 -0.89,0.26 -0.88,0.91 0.04,5.43 0.04,10.86 0.12,16.29 0.02,1.7 0.75,2.26 2.46,2.1 1.1,-0.1 2.19,-0.26 3.23,-0.65 0.59,-0.22 0.89,-0.09 1.14,0.53 0.93,2.29 0.92,2.37 -1.32,3.52 -2.54,1.3 -5.22,1.99 -8.1,1.79 -2.27,-0.16 -3.68,-1.27 -4.35,-3.45 -0.3,-0.98 -0.41,-1.99 -0.41,-3.01 z m -97.67005,-8.99 c 0.57,-0.84 1.11,-1.74 1.76,-2.55 1.68,-2.09 3.68,-3.62 6.54,-3.66 1.08,-0.01 1.63,0.28 1.57,1.52 -0.1,2.08 -0.05,4.16 -0.02,6.24 0.01,0.74 -0.17,0.96 -0.96,0.76 -2.36,-0.59 -4.71,-0.42 -7.03,0.28 -0.8,0.24 -1.16,0.62 -1.15,1.52 0.05,4.5 0.04,9 0,13.5 -0.01,0.89 0.29,1.16 1.15,1.2 1.23,0.06 2.44,0.32 3.67,0.39 0.75,0.05 0.91,0.38 0.89,1.04 -0.06,2.86 0.29,2.28 -2.25,2.3 -4.2,0.04 -8.41,-0.02 -12.61,0.03 -0.91,0.01 -1.39,-0.18 -1.22,-1.18 0.02,-0.12 0,-0.25 0,-0.38 0.02,-2.1 -0.24,-1.88 1.77,-2.04 1.33,-0.11 1.6,-0.67 1.58,-1.9 -0.07,-5.35 -0.04,-10.7 -0.02,-16.05 0,-0.78 -0.17,-1.2 -1,-1.46 -2.21,-0.68 -2.7,-1.69 -2.22,-3.99 0.11,-0.52 0.45,-0.56 0.82,-0.62 2.22,-0.34 4.44,-0.7 6.67,-0.99 0.99,-0.13 1.82,0.7 1.84,1.76 0.03,1.4 0.03,2.8 0.04,4.2 -0.01,0.02 0.06,0.04 0.18,0.08 z m 25.24,6.59 c 0,3.69 0.04,7.38 -0.03,11.07 -0.02,1.04 0.31,1.48 1.32,1.49 0.29,0 0.59,0.12 0.88,0.13 0.93,0.01 1.18,0.47 1.16,1.37 -0.05,2.19 0,2.19 -2.24,2.19 -3.48,0 -6.96,-0.04 -10.44,0.03 -1.09,0.02 -1.47,-0.33 -1.3,-1.36 0.02,-0.12 0.02,-0.26 0,-0.38 -0.28,-1.39 0.39,-1.96 1.7,-1.9 1.36,0.06 1.76,-0.51 1.74,-1.88 -0.09,-5.17 -0.08,-10.35 0,-15.53 0.02,-1.22 -0.32,-1.87 -1.52,-2.17 -0.57,-0.14 -1.47,-0.11 -1.57,-0.85 -0.15,-1.04 -0.05,-2.11 0.01,-3.17 0.02,-0.34 0.44,-0.35 0.73,-0.39 2.81,-0.39 5.63,-0.77 8.44,-1.18 0.92,-0.14 1.15,0.2 1.14,1.09 -0.04,3.8 -0.02,7.62 -0.02,11.44 z"
|
||||
fill="#4280f6"
|
||||
id="path6" />
|
||||
<path
|
||||
d="m 101.44402,125.64 c 0,-3.18 -0.03,-6.37 0.02,-9.55 0.02,-0.94 -0.26,-1.36 -1.22,-1.22 -0.21,0.03 -0.430003,0.04 -0.630003,0 -0.51,-0.12 -1.35,0.39 -1.44,-0.55 -0.08,-0.85 -0.429998,-1.87 0.93,-2.24 2.080003,-0.57 2.720003,-2.39 3.350003,-4.17 0.31,-0.88 0.62,-1.76 0.87,-2.66 0.18,-0.64 0.52,-0.85 1.19,-0.84 2.46,0.05 2,-0.15 2.04,2.04 0.02,1.1 0.08,2.21 -0.02,3.31 -0.11,1.16 0.46,1.52 1.46,1.53 1.78,0.01 3.57,0.04 5.35,-0.01 0.82,-0.02 1.12,0.23 1.11,1.08 -0.05,2.86 0.19,2.49 -2.42,2.51 -1.53,0.01 -3.06,0.02 -4.59,-0.01 -0.65,-0.01 -0.9,0.22 -0.9,0.89 0.02,5.52 0,11.04 0.03,16.56 0,0.67 0.14,1.34 0.25,2.01 0.17,1.04 1.17,1.62 2.59,1.42 1.29,-0.19 2.57,-0.49 3.86,-0.69 0.43,-0.07 1.05,-0.47 1.19,0.4 0.12,0.75 1.05,1.61 -0.09,2.24 -2.09,1.16 -4.28,2.07 -6.71,2.16 -1.05,0.04 -2.13,0.2 -3.16,-0.14 -1.92,-0.65 -3.03,-2.28 -3.05,-4.51 -0.02,-3.19 -0.01,-6.37 -0.01,-9.56 z"
|
||||
fill="#c8dbfb"
|
||||
id="path8" />
|
||||
<path
|
||||
d="m -50.816031,95.21 c 0.19,2.160002 1.85,3.240002 2.82,4.740002 0.25,0.379998 0.48,0.109998 0.67,-0.16 0.21,-0.31 0.6,-1.21 1.15,-1.28 -0.35,1.38 -0.04,3.149998 0.16,4.449998 0.49,3.05 -1.22,5.64 -4.07,6.18 -3.38,0.65 -6.22,-2.21 -5.6,-5.62 0.23,-1.24 1.37,-2.5 0.77,-3.699998 -0.85,-1.7 0.54,-0.52 0.79,-0.22 1.04,1.199998 1.21,0.09 1.45,-0.55 0.24,-0.63 0.31,-1.31 0.47,-1.97 0.19,-0.770002 0.55,-1.400002 1.39,-1.870002 z"
|
||||
fill="#4280f6"
|
||||
id="path10" />
|
||||
</svg>
After: Size 11 KiB
changedetectionio/static/images/email.svg (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
|
||||
|
||||
<svg
|
||||
fill="#FFFFFF"
|
||||
height="7.5005589"
|
||||
width="11.248507"
|
||||
version="1.1"
|
||||
id="Layer_1"
|
||||
viewBox="0 0 7.1975545 4.7993639"
|
||||
xml:space="preserve"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"><defs
|
||||
id="defs19" />
|
||||
<g
|
||||
id="g14"
|
||||
transform="matrix(-0.01406065,0,0,0.01406065,7.1975543,-1.1990922)">
|
||||
<g
|
||||
id="g12">
|
||||
<g
|
||||
id="g10">
|
||||
<path
|
||||
d="M 468.373,85.28 H 45.333 C 21.227,85.28 0,105.76 0,129.014 V 383.2 c 0,23.147 21.227,43.413 45.333,43.413 h 422.933 c 23.68,0 43.627,-19.84 43.627,-43.413 V 129.014 C 512,105.334 492.053,85.28 468.373,85.28 Z m 0,320 H 45.333 c -12.373,0 -24,-10.773 -24,-22.08 V 129.014 c 0,-11.307 11.84,-22.4 24,-22.4 h 422.933 c 11.733,0 22.293,10.667 22.293,22.4 V 383.2 h 0.107 c 10e-4,11.734 -10.453,22.08 -22.293,22.08 z"
|
||||
id="path2" />
|
||||
<path
|
||||
d="m 440.853,153.974 c -3.307,-4.907 -9.92,-6.187 -14.827,-2.987 L 256,264.48 85.973,151.094 c -4.907,-3.2 -11.52,-1.707 -14.72,3.2 -3.093,4.8 -1.813,11.307 2.88,14.507 l 176,117.333 c 3.627,2.347 8.213,2.347 11.84,0 l 176,-117.333 c 4.8,-3.201 6.187,-9.921 2.88,-14.827 z"
|
||||
id="path4" />
|
||||
<path
|
||||
d="m 143.573,257.654 c -0.107,0.107 -0.32,0.213 -0.427,0.32 L 68.48,311.307 c -4.907,3.307 -6.187,9.92 -2.88,14.827 3.307,4.907 9.92,6.187 14.827,2.88 0.107,-0.107 0.32,-0.213 0.427,-0.32 l 74.667,-53.333 c 4.907,-3.307 6.187,-9.92 2.88,-14.827 -3.308,-4.907 -9.921,-6.187 -14.828,-2.88 z"
|
||||
id="path6" />
|
||||
<path
|
||||
d="m 443.947,311.627 c -0.107,-0.107 -0.32,-0.213 -0.427,-0.32 l -74.667,-53.333 c -4.693,-3.52 -11.413,-2.56 -14.933,2.133 -3.52,4.693 -2.56,11.413 2.133,14.933 0.107,0.107 0.32,0.213 0.427,0.32 l 74.667,53.333 c 4.693,3.52 11.413,2.56 14.933,-2.133 3.52,-4.693 2.56,-11.413 -2.133,-14.933 z"
|
||||
id="path8" />
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
After: Size 1.9 KiB
Before: Size 31 KiB
changedetectionio/static/images/generic-icon.svg (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
<svg width="61.649mm" height="61.649mm" version="1.1" viewBox="0 0 61.649 61.649" xml:space="preserve" xmlns="http://www.w3.org/2000/svg"><g transform="translate(66.269 -15.463)" fill="#3056d3"><g transform="matrix(1.423 0 0 1.423 101.16 69.23)" fill="#3056d3"><g transform="matrix(.8229 0 0 .8229 -23.378 -2.3935)" fill="#3056d3"><path d="m-88.248-43.007a26.323 26.323 0 0 0-26.323 26.323 26.323 26.323 0 0 0 26.323 26.323 26.323 26.323 0 0 0 26.323-26.323 26.323 26.323 0 0 0-26.323-26.323zm0 2.8417a23.482 23.482 0 0 1 23.482 23.482 23.482 23.482 0 0 1-23.482 23.482 23.482 23.482 0 0 1-23.482-23.482 23.482 23.482 0 0 1 23.482-23.482z"/><g transform="matrix(.26458 0 0 .26458 -115.65 -44.085)"><path d="m33.02 64.43c0.35-0.05 2.04-0.13 2.04-0.13h25.53s3.17 0.32 3.67 0.53c2.5 1.05 3.98 1.89 6.04 3.57 0.72 0.58 4.12 4.01 4.12 4.01l51.67 57.39s1.61 1.65 1.97 1.94c1.2 0.97 2.48 1.96 3.98 2.32 0.5 0.12 2.72 0.21 2.72 0.21h27.32l-8.83-9.04s-1.31-1.65-1.44-1.94c-0.45-0.93-0.59-2.59-0.13-3.51 0.35-0.69 1.46-1.87 2.23-1.98 1.03-0.14 2.12-0.39 3.02 0.14 0.33 0.2 1.64 1.32 1.64 1.32l17.49 17.49s1.35 1.09 1.6 1.6c0.17 0.34 0.29 0.82 0.15 1.18-0.17 0.42-1.42 1.63-1.42 1.63l-0.94 0.98-15.69 16.37s-1.44 1.4-1.79 1.67c-0.76 0.6-1.99 0.89-2.96 0.9-1.03 0-2.62-1.11-3.26-1.91-0.6-0.76-1.1-2.22-0.77-3.13 0.16-0.45 1.28-1.85 1.28-1.85l11.36-11.3-29.47-0.02-1.68 0.09s-4.16-0.66-5.26-1.03c-1.63-0.56-3.44-1.82-4.75-2.93-0.39-0.33-1.8-1.92-1.8-1.92l-51.7-59.28s-2-2.06-2.43-2.43c-1.37-1.17-2-1.62-3.76-2.34-0.44-0.18-3.45-0.55-3.45-0.55l-24.13-0.22s-2.23-0.15-2.61-0.22c-1.08-0.21-2.16-1.07-2.81-1.83-0.79-0.92-0.59-3.06 0.06-4.09 0.57-0.89 2.14-1.52 3.19-1.66z"/><path d="m86.1 109.7-17.13 19.65s-2 2.06-2.43 2.43c-1.37 1.17-2 1.62-3.76 2.34-0.44 0.18-3.45 0.55-3.45 0.55l-24.13 0.22s-2.23 0.15-2.61 0.22c-1.08 0.21-2.16 1.07-2.81 1.83-0.79 0.92-0.59 3.06 0.06 4.09 0.57 0.89 2.14 1.52 3.19 1.66 0.35 0.05 2.04 0.13 2.04 0.13h25.53s3.17-0.32 3.67-0.53c2.5-1.05 3.98-1.89 6.04-3.57 0.72-0.58 4.12-4.01 4.12-4.01l17.38-19.3z"/><path d="m177.81 67.6c-0.17-0.42-1.42-1.63-1.42-1.63l-0.94-0.98-15.69-16.37s-1.44-1.4-1.79-1.67c-0.76-0.6-1.99-0.89-2.96-0.9-1.03 0-2.62 1.11-3.26 1.91-0.6 0.76-1.1 2.22-0.77 3.13 0.16 0.45 1.28 1.85 1.28 1.85l11.36 11.3-29.47 0.02-1.68-0.09s-4.16 0.66-5.26 1.03c-1.63 0.56-3.44 1.82-4.75 2.93-0.39 0.33-1.8 1.92-1.8 1.92l-18.91 21.69 5.98 5.98 18.38-20.41s1.61-1.65 1.97-1.94c1.2-0.97 2.48-1.96 3.98-2.32 0.5-0.12 2.72-0.21 2.72-0.21h27.32l-8.83 9.04s-1.31 1.65-1.44 1.94c-0.45 0.93-0.59 2.59-0.13 3.51 0.35 0.69 1.46 1.87 2.23 1.98 1.03 0.14 2.12 0.39 3.02-0.14 0.33-0.2 1.64-1.32 1.64-1.32l17.49-17.49s1.35-1.09 1.6-1.6c0.17-0.34 0.29-0.82 0.15-1.18z"/></g></g></g></g></svg>
After: Size 2.7 KiB
Before: Size 43 KiB | After: Size 22 KiB
changedetectionio/static/images/oxylabs.svg (new file, 57 lines)
After: Size 9.7 KiB
changedetectionio/static/images/pdf-icon.svg (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="75.320129mm" height="92.604164mm" viewBox="0 0 75.320129 92.604164">
|
||||
<g transform="translate(53.548057 -183.975276) scale(1.4843)">
|
||||
<path fill="#ff2116" d="M-29.632812 123.94727c-3.551967 0-6.44336 2.89347-6.44336 6.44531v49.49804c0 3.55185 2.891393 6.44532 6.44336 6.44532H8.2167969c3.5519661 0 6.4433591-2.89335 6.4433591-6.44532v-40.70117s.101353-1.19181-.416015-2.35156c-.484969-1.08711-1.275391-1.84375-1.275391-1.84375a1.0584391 1.0584391 0 0 0-.0059-.008l-9.3906254-9.21094a1.0584391 1.0584391 0 0 0-.015625-.0156s-.8017392-.76344-1.9902344-1.27344c-1.39939552-.6005-2.8417968-.53711-2.8417968-.53711l.021484-.002z" color="#000" font-family="sans-serif" overflow="visible" paint-order="markers fill stroke" style="line-height:normal;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;text-transform:none;text-orientation:mixed;white-space:normal;shape-padding:0;isolation:auto;mix-blend-mode:normal;solid-color:#000000;solid-opacity:1"/>
|
||||
<path fill="#f5f5f5" d="M-29.632812 126.06445h28.3789058a1.0584391 1.0584391 0 0 0 .021484 0s1.13480448.011 1.96484378.36719c.79889772.34282 1.36536982.86176 1.36914062.86524.0000125.00001.00391.004.00391.004l9.3671868 9.18945s.564354.59582.837891 1.20899c.220779.49491.234375 1.40039.234375 1.40039a1.0584391 1.0584391 0 0 0-.002.0449v40.74609c0 2.41592-1.910258 4.32813-4.3261717 4.32813H-29.632812c-2.415914 0-4.326172-1.91209-4.326172-4.32813v-49.49804c0-2.41603 1.910258-4.32813 4.326172-4.32813z" color="#000" font-family="sans-serif" overflow="visible" paint-order="markers fill stroke" style="line-height:normal;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;text-transform:none;text-orientation:mixed;white-space:normal;shape-padding:0;isolation:auto;mix-blend-mode:normal;solid-color:#000000;solid-opacity:1"/>
|
||||
<path fill="#ff2116" d="M-23.40766 161.09299c-1.45669-1.45669.11934-3.45839 4.39648-5.58397l2.69124-1.33743 1.04845-2.29399c.57665-1.26169 1.43729-3.32036 1.91254-4.5748l.8641-2.28082-.59546-1.68793c-.73217-2.07547-.99326-5.19438-.52872-6.31588.62923-1.51909 2.69029-1.36323 3.50626.26515.63727 1.27176.57212 3.57488-.18329 6.47946l-.6193 2.38125.5455.92604c.30003.50932 1.1764 1.71867 1.9475 2.68743l1.44924 1.80272 1.8033728-.23533c5.72900399-.74758 7.6912472.523 7.6912472 2.34476 0 2.29921-4.4984914 2.48899-8.2760865-.16423-.8499666-.59698-1.4336605-1.19001-1.4336605-1.19001s-2.3665326.48178-3.531704.79583c-1.202707.32417-1.80274.52719-3.564509 1.12186 0 0-.61814.89767-1.02094 1.55026-1.49858 2.4279-3.24833 4.43998-4.49793 5.1723-1.3991.81993-2.86584.87582-3.60433.13733zm2.28605-.81668c.81883-.50607 2.47616-2.46625 3.62341-4.28553l.46449-.73658-2.11497 1.06339c-3.26655 1.64239-4.76093 3.19033-3.98386 4.12664.43653.52598.95874.48237 2.01093-.16792zm21.21809-5.95578c.80089-.56097.68463-1.69142-.22082-2.1472-.70466-.35471-1.2726074-.42759-3.1031574-.40057-1.1249.0767-2.9337647.3034-3.2403347.37237 0 0 .993716.68678 1.434896.93922.58731.33544 2.0145161.95811 3.0565161 1.27706 1.02785.31461 1.6224.28144 2.0729-.0409zm-8.53152-3.54594c-.4847-.50952-1.30889-1.57296-1.83152-2.3632-.68353-.89643-1.02629-1.52887-1.02629-1.52887s-.4996 1.60694-.90948 2.57394l-1.27876 3.16076-.37075.71695s1.971043-.64627 2.97389-.90822c1.0621668-.27744 3.21787-.70134 3.21787-.70134zm-2.74938-11.02573c.12363-1.0375.1761-2.07346-.15724-2.59587-.9246-1.01077-2.04057-.16787-1.85154 2.23517.0636.8084.26443 2.19033.53292 3.04209l.48817 1.54863.34358-1.16638c.18897-.64151.47882-2.02015.64411-3.06364z"/>
|
||||
<path fill="#2c2c2c" d="M-20.930423 167.83862h2.364986q1.133514 0 1.840213.2169.706698.20991 1.189489.9446.482795.72769.482795 1.75625 0 .94459-.391832 1.6233-.391833.67871-1.056548.97958-.65772.30087-2.02913.30087h-.818651v3.72941h-1.581322zm1.581322 1.22447v3.33058h.783664q1.049552 0 1.44838-.39184.405826-.39183.405826-1.27345 0-.65772-.265887-1.06355-.265884-.41282-.587747-.50378-.314866-.098-1.000572-.098zm5.50664-1.22447h2.148082q1.560333 0 2.4909318.55276.9375993.55276 1.4133973 1.6443.482791 1.09153.482791 2.42096 0 1.3994-.4338151 2.49793-.4268149 1.09153-1.3154348 1.76324-.8816233.67172-2.5189212.67172h-2.267031zm1.581326 1.26645v7.018h.657715q1.378411 0 2.001144-.9516.6227329-.95858.6227329-2.5539 0-3.5125-2.6238769-3.5125zm6.4722254-1.26645h5.30372941v1.26645H-4.2075842v2.85478h2.9807225v1.26646h-2.9807225v4.16322h-1.5813254z" font-family="Franklin Gothic Medium Cond" letter-spacing="0" style="line-height:125%;-inkscape-font-specification:'Franklin Gothic Medium Cond'" word-spacing="4.26000023"/>
|
||||
</g>
|
||||
</svg>
After: Size 5.0 KiB
changedetectionio/static/images/price-tag-icon.svg (new file, 2 lines)
@@ -0,0 +1,2 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="83.39" height="89.648" enable-background="new 0 0 122.406 122.881" version="1.1" viewBox="0 0 83.39 89.648" xml:space="preserve" xmlns="http://www.w3.org/2000/svg"><g transform="translate(5e-4 -33.234)"><path d="m44.239 42.946-39.111 39.896 34.908 34.91 39.09-39.876-1.149-34.931zm-0.91791 42.273c0.979-0.979 1.507-1.99 1.577-3.027 0.077-1.043-0.248-2.424-0.967-4.135-0.725-1.717-1.348-3.346-1.87-4.885s-0.814-3.014-0.897-4.432c-0.07-1.42 0.134-2.768 0.624-4.045 0.477-1.279 1.348-2.545 2.607-3.804 2.099-2.099 4.535-3.123 7.314-3.065 2.773 0.063 5.457 1.158 8.04 3.294l2.881 3.034c1.946 2.607 2.799 5.33 2.557 8.166-0.235 2.83-1.532 5.426-3.893 7.785l-6.296-6.297c1.291-1.291 2.035-2.531 2.238-3.727 0.191-1.197-0.165-2.252-1.081-3.168-0.821-0.82-1.717-1.195-2.69-1.139-0.967 0.064-1.908 0.547-2.817 1.457-0.922 0.922-1.393 1.914-1.412 2.977s0.306 2.416 0.973 4.064c0.661 1.652 1.24 3.25 1.736 4.801 0.496 1.553 0.782 3.035 0.858 4.445 0.076 1.426-0.127 2.787-0.591 4.104-0.477 1.316-1.336 2.596-2.588 3.848-2.125 2.125-4.522 3.186-7.212 3.18s-5.311-1.063-7.855-3.16l-3.747 3.746-2.964-2.965 3.766-3.764c-2.423-2.996-3.568-5.998-3.447-9.02 0.127-3.014 1.476-5.813 4.045-8.383l6.278 6.277c-1.412 1.412-2.175 2.799-2.277 4.16-0.108 1.367 0.414 2.627 1.571 3.783 0.839 0.84 1.755 1.26 2.741 1.242 0.985-0.017 1.92-0.47 2.798-1.347zm21.127-46.435h17.457c-0.0269 2.2368 0.69936 16.025 0.69936 16.025l0.785 23.858c0.019 0.609-0.221 1.164-0.619 1.564l5e-3 4e-3 -41.236 42.022c-0.82213 0.8378-2.175 0.83-3.004 0l-37.913-37.91c-0.83-0.83-0.83-2.176 0-3.006l41.236-42.021c0.39287-0.42671 1.502-0.53568 1.502-0.53568zm18.011 11.59c-59.392-29.687-29.696-14.843 0 0z"/></g></svg>
After: Size 1.7 KiB
@@ -1,46 +1,5 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="18"
|
||||
height="19.92"
|
||||
viewBox="0 0 18 19.92"
|
||||
version="1.1"
|
||||
id="svg6"
|
||||
sodipodi:docname="spread.svg"
|
||||
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<defs
|
||||
id="defs10" />
|
||||
<sodipodi:namedview
|
||||
id="namedview8"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
showgrid="false"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0"
|
||||
inkscape:zoom="28.416667"
|
||||
inkscape:cx="9.0087975"
|
||||
inkscape:cy="9.9941348"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1056"
|
||||
inkscape:window-x="1920"
|
||||
inkscape:window-y="0"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg6" />
|
||||
<path
|
||||
d="M -3,-2 H 21 V 22 H -3 Z"
|
||||
fill="none"
|
||||
id="path2" />
|
||||
<path
|
||||
d="m 15,14.08 c -0.76,0 -1.44,0.3 -1.96,0.77 L 5.91,10.7 C 5.96,10.47 6,10.24 6,10 6,9.76 5.96,9.53 5.91,9.3 L 12.96,5.19 C 13.5,5.69 14.21,6 15,6 16.66,6 18,4.66 18,3 18,1.34 16.66,0 15,0 c -1.66,0 -3,1.34 -3,3 0,0.24 0.04,0.47 0.09,0.7 L 5.04,7.81 C 4.5,7.31 3.79,7 3,7 1.34,7 0,8.34 0,10 c 0,1.66 1.34,3 3,3 0.79,0 1.5,-0.31 2.04,-0.81 l 7.12,4.16 c -0.05,0.21 -0.08,0.43 -0.08,0.65 0,1.61 1.31,2.92 2.92,2.92 1.61,0 2.92,-1.31 2.92,-2.92 0,-1.61 -1.31,-2.92 -2.92,-2.92 z"
|
||||
id="path4"
|
||||
style="fill:#0078e7;fill-opacity:1" />
|
||||
<svg width="18" height="19.92" viewBox="0 0 18 19.92" xmlns="http://www.w3.org/2000/svg" xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<path d="M -3,-2 H 21 V 22 H -3 Z" fill="none" id="path2"/>
|
||||
<path d="m 15,14.08 c -0.76,0 -1.44,0.3 -1.96,0.77 L 5.91,10.7 C 5.96,10.47 6,10.24 6,10 6,9.76 5.96,9.53 5.91,9.3 L 12.96,5.19 C 13.5,5.69 14.21,6 15,6 16.66,6 18,4.66 18,3 18,1.34 16.66,0 15,0 c -1.66,0 -3,1.34 -3,3 0,0.24 0.04,0.47 0.09,0.7 L 5.04,7.81 C 4.5,7.31 3.79,7 3,7 1.34,7 0,8.34 0,10 c 0,1.66 1.34,3 3,3 0.79,0 1.5,-0.31 2.04,-0.81 l 7.12,4.16 c -0.05,0.21 -0.08,0.43 -0.08,0.65 0,1.61 1.31,2.92 2.92,2.92 1.61,0 2.92,-1.31 2.92,-2.92 0,-1.61 -1.31,-2.92 -2.92,-2.92 z" id="path4" style="fill:#0078e7;fill-opacity:1"/>
|
||||
</svg>
Before: Size 1.7 KiB | After: Size 787 B
@@ -10,10 +10,10 @@ $(document).ready(function () {
|
||||
}
|
||||
})
|
||||
var browsersteps_session_id;
|
||||
var browserless_seconds_remaining=0;
|
||||
var browserless_seconds_remaining = 0;
|
||||
var apply_buttons_disabled = false;
|
||||
var include_text_elements = $("#include_text_elements");
|
||||
var xpath_data;
|
||||
var xpath_data = false;
|
||||
var current_selected_i;
|
||||
var state_clicked = false;
|
||||
var c;
|
||||
@@ -25,11 +25,42 @@ $(document).ready(function () {
|
||||
$(window).resize(function () {
|
||||
set_scale();
|
||||
});
|
||||
// Should always be disabled
|
||||
$('#browser_steps >li:first-child select').val('Goto site').attr('disabled', 'disabled');
|
||||
|
||||
$('a#browsersteps-tab').click(function () {
|
||||
$('#browsersteps-click-start').click(function () {
|
||||
$("#browsersteps-click-start").fadeOut();
|
||||
$("#browsersteps-selector-wrapper .spinner").fadeIn();
|
||||
start();
|
||||
});
|
||||
|
||||
$('a#browsersteps-tab').click(function () {
|
||||
reset();
|
||||
});
|
||||
|
||||
window.addEventListener('hashchange', function () {
|
||||
if (window.location.hash == '#browser-steps') {
|
||||
reset();
|
||||
}
|
||||
});
|
||||
|
||||
function reset() {
|
||||
xpath_data = false;
|
||||
$('#browsersteps-img').removeAttr('src');
|
||||
$("#browsersteps-click-start").show();
|
||||
$("#browsersteps-selector-wrapper .spinner").hide();
|
||||
browserless_seconds_remaining = 0;
|
||||
browsersteps_session_id = false;
|
||||
apply_buttons_disabled = false;
|
||||
ctx.clearRect(0, 0, c.width, c.height);
|
||||
set_first_gotosite_disabled();
|
||||
}
|
||||
|
||||
function set_first_gotosite_disabled() {
|
||||
$('#browser_steps >li:first-child select').val('Goto site').attr('disabled', 'disabled');
|
||||
$('#browser_steps >li:first-child').css('opacity', '0.5');
|
||||
}
|
||||
|
||||
// Show seconds remaining until playwright/browserless needs to restart the session
|
||||
// (See comment at the top of changedetectionio/blueprint/browser_steps/__init__.py )
|
||||
setInterval(() => {
|
||||
@@ -40,21 +71,6 @@ $(document).ready(function () {
|
||||
}, "1000")
|
||||
|
||||
|
||||
if (window.location.hash == '#browser-steps') {
|
||||
start();
|
||||
}
|
||||
|
||||
window.addEventListener('hashchange', function () {
|
||||
if (window.location.hash == '#browser-steps') {
|
||||
start();
|
||||
}
|
||||
// For when the page loads
|
||||
if (!window.location.hash || window.location.hash != '#browser-steps') {
|
||||
$("img#browsersteps-img").attr('src', '');
|
||||
return;
|
||||
}
|
||||
});
|
||||
|
||||
function set_scale() {
|
||||
|
||||
// some things to check if the scaling doesnt work
|
||||
@@ -87,7 +103,6 @@ $(document).ready(function () {
|
||||
// @todo is click better?
|
||||
$('#browsersteps-selector-canvas').off("mousemove mousedown click");
|
||||
// Undo disable_browsersteps_ui
|
||||
$("#browser_steps select,input").removeAttr('disabled').css('opacity', '1.0');
|
||||
$("#browser-steps-ui").css('opacity', '1.0');
|
||||
|
||||
// init
|
||||
@@ -99,11 +114,11 @@ $(document).ready(function () {
|
||||
e.preventDefault()
|
||||
});
|
||||
|
||||
// When the mouse moves we know which element it should be above
|
||||
// mousedown will link that to the UI (select the right action, highlight etc)
|
||||
$('#browsersteps-selector-canvas').bind('mousedown', function (e) {
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent
|
||||
e.preventDefault()
|
||||
console.log(e);
|
||||
console.log("current xpath in index is "+current_selected_i);
|
||||
last_click_xy = {'x': parseInt((1 / x_scale) * e.offsetX), 'y': parseInt((1 / y_scale) * e.offsetY)}
|
||||
process_selected(current_selected_i);
|
||||
current_selected_i = false;
|
||||
@@ -117,7 +132,12 @@ $(document).ready(function () {
|
||||
}
|
||||
});
|
||||
|
||||
// Debounce and find the current most 'interesting' element we are hovering above
|
||||
$('#browsersteps-selector-canvas').bind('mousemove', function (e) {
|
||||
if (!xpath_data) {
|
||||
return;
|
||||
}
|
||||
|
||||
// checkbox if find elements is enabled
|
||||
ctx.clearRect(0, 0, c.width, c.height);
|
||||
ctx.fillStyle = 'rgba(255,0,0, 0.1)';
|
||||
@@ -132,41 +152,40 @@ $(document).ready(function () {
|
||||
current_selected_i = false;
|
||||
// Reverse order - the most specific one should be deeper/"laster"
|
||||
// Basically, find the most 'deepest'
|
||||
//$('#browsersteps-selector-canvas').css('cursor', 'pointer');
|
||||
for (var i = xpath_data['size_pos'].length; i !== 0; i--) {
|
||||
// draw all of them? let them choose somehow?
|
||||
var sel = xpath_data['size_pos'][i - 1];
|
||||
var possible_elements = [];
|
||||
xpath_data['size_pos'].forEach(function (item, index) {
|
||||
// If we are in a bounding-box
|
||||
if (e.offsetY > sel.top * y_scale && e.offsetY < sel.top * y_scale + sel.height * y_scale
|
||||
if (e.offsetY > item.top * y_scale && e.offsetY < item.top * y_scale + item.height * y_scale
|
||||
&&
|
||||
e.offsetX > sel.left * y_scale && e.offsetX < sel.left * y_scale + sel.width * y_scale
|
||||
e.offsetX > item.left * y_scale && e.offsetX < item.left * y_scale + item.width * y_scale
|
||||
|
||||
) {
|
||||
// Only highlight these interesting types
|
||||
if (1) {
|
||||
ctx.strokeRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
|
||||
ctx.fillRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
|
||||
current_selected_i = i - 1;
|
||||
break;
|
||||
|
||||
// find the smallest one at this x,y
|
||||
// does it mean sort the xpath list by size (w*h) i think so!
|
||||
} else {
|
||||
|
||||
if ( include_text_elements[0].checked === true) {
|
||||
// blue one with background instead?
|
||||
ctx.fillStyle = 'rgba(0,0,255, 0.1)';
|
||||
ctx.strokeStyle = 'rgba(0,0,200, 0.7)';
|
||||
$('#browsersteps-selector-canvas').css('cursor', 'grab');
|
||||
ctx.strokeRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
|
||||
ctx.fillRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
|
||||
current_selected_i = i - 1;
|
||||
break;
|
||||
}
|
||||
// There could be many elements here, record them all and then we'll find out which is the most 'useful'
|
||||
// (input, textarea, button, A etc)
|
||||
if (item.width < xpath_data['browser_width']) {
|
||||
possible_elements.push(item);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Find the best one
|
||||
if (possible_elements.length) {
|
||||
possible_elements.forEach(function (item, index) {
|
||||
if (["a", "input", "textarea", "button"].includes(item['tagName'])) {
|
||||
current_selected_i = item;
|
||||
}
|
||||
});
|
||||
|
||||
if (!current_selected_i) {
|
||||
current_selected_i = possible_elements[0];
|
||||
}
|
||||
|
||||
sel = xpath_data['size_pos'][current_selected_i];
|
||||
ctx.strokeRect(current_selected_i.left * x_scale, current_selected_i.top * y_scale, current_selected_i.width * x_scale, current_selected_i.height * y_scale);
|
||||
ctx.fillRect(current_selected_i.left * x_scale, current_selected_i.top * y_scale, current_selected_i.width * x_scale, current_selected_i.height * y_scale);
|
||||
}
|
||||
|
||||
|
||||
}.debounce(10));
|
||||
});
|
||||
|
||||
@@ -175,53 +194,40 @@ $(document).ready(function () {
|
||||
// });
|
||||
|
||||
|
||||
|
||||
// callback for clicking on an xpath on the canvas
|
||||
function process_selected(xpath_data_index) {
|
||||
function process_selected(selected_in_xpath_list) {
|
||||
found_something = false;
|
||||
var first_available = $("ul#browser_steps li.empty").first();
|
||||
|
||||
|
||||
if (xpath_data_index !== false) {
|
||||
if (selected_in_xpath_list !== false) {
|
||||
// Nothing focused, so fill in a new one
|
||||
// if inpt type button or <button>
|
||||
// from the top, find the next not used one and use it
|
||||
var x = xpath_data['size_pos'][xpath_data_index];
|
||||
var x = selected_in_xpath_list;
|
||||
console.log(x);
|
||||
if (x && first_available.length) {
|
||||
// @todo will it let you click shit that has a layer ontop? probably not.
|
||||
if (x['tagtype'] === 'text' || x['tagtype'] === 'email' || x['tagName'] === 'textarea' || x['tagtype'] === 'password' || x['tagtype'] === 'search' ) {
|
||||
if (x['tagtype'] === 'text' || x['tagtype'] === 'number' || x['tagtype'] === 'email' || x['tagName'] === 'textarea' || x['tagtype'] === 'password' || x['tagtype'] === 'search') {
|
||||
$('select', first_available).val('Enter text in field').change();
|
||||
$('input[type=text]', first_available).first().val(x['xpath']);
|
||||
$('input[placeholder="Value"]', first_available).addClass('ok').click().focus();
|
||||
found_something = true;
|
||||
} else {
|
||||
// Assume it's just for clicking on
|
||||
// what are we clicking on?
|
||||
if (x['tagName'].startsWith('h')|| x['tagName'] === 'a' || x['tagName'] === 'button' || x['tagtype'] === 'submit'|| x['tagtype'] === 'checkbox'|| x['tagtype'] === 'radio'|| x['tagtype'] === 'li') {
|
||||
// There's no good way (that I know) to find if this
|
||||
// see https://stackoverflow.com/questions/446892/how-to-find-event-listeners-on-a-dom-node-in-javascript-or-in-debugging
|
||||
// https://codepen.io/azaslavsky/pen/DEJVWv
|
||||
|
||||
// So we dont know if its really a clickable element or not :-(
|
||||
// Assume it is - then we dont fill the pages with unreliable "Click X,Y" selections
|
||||
// If you switch to "Click X,y" after an element here is setup, it will give the last co-ords anyway
|
||||
//if (x['isClickable'] || x['tagName'].startsWith('h') || x['tagName'] === 'a' || x['tagName'] === 'button' || x['tagtype'] === 'submit' || x['tagtype'] === 'checkbox' || x['tagtype'] === 'radio' || x['tagtype'] === 'li') {
|
||||
$('select', first_available).val('Click element').change();
|
||||
$('input[type=text]', first_available).first().val(x['xpath']);
|
||||
found_something = true;
|
||||
}
|
||||
}
|
||||
|
||||
first_available.xpath_data_index=xpath_data_index;
|
||||
|
||||
if (!found_something) {
|
||||
if ( include_text_elements[0].checked === true) {
|
||||
// Suggest that we use as filter?
|
||||
// @todo filters should always be in the last steps, nothing non-filter after it
|
||||
found_something = true;
|
||||
ctx.strokeStyle = 'rgba(0,0,255, 0.9)';
|
||||
ctx.fillStyle = 'rgba(0,0,255, 0.1)';
|
||||
$('select', first_available).val('Extract text and use as filter').change();
|
||||
$('input[type=text]', first_available).first().val(x['xpath']);
|
||||
include_text_elements[0].checked = false;
|
||||
}
|
||||
//}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -234,15 +240,15 @@ $(document).ready(function () {
|
||||
|
||||
function start() {
|
||||
console.log("Starting browser-steps UI");
|
||||
browsersteps_session_id=Date.now();
|
||||
browsersteps_session_id = false;
|
||||
// @todo This setting of the first one should be done at the datalayer but wtforms doesnt wanna play nice
|
||||
$('#browser_steps >li:first-child').removeClass('empty');
|
||||
$('#browser_steps >li:first-child select').val('Goto site').attr('disabled', 'disabled');
|
||||
$('#browser-steps-ui .loader').show();
|
||||
set_first_gotosite_disabled();
|
||||
$('#browser-steps-ui .loader .spinner').show();
|
||||
$('.clear,.remove', $('#browser_steps >li:first-child')).hide();
|
||||
$.ajax({
|
||||
type: "GET",
|
||||
url: browser_steps_sync_url+"&browsersteps_session_id="+browsersteps_session_id,
|
||||
url: browser_steps_start_url,
|
||||
statusCode: {
|
||||
400: function () {
|
||||
// More than likely the CSRF token was lost when the server restarted
|
||||
@@ -250,12 +256,13 @@ $(document).ready(function () {
|
||||
}
|
||||
}
|
||||
}).done(function (data) {
|
||||
xpath_data = data.xpath_data;
|
||||
$('#browsersteps-img').attr('src', data.screenshot);
|
||||
$("#loading-status-text").fadeIn();
|
||||
browsersteps_session_id = data.browsersteps_session_id;
|
||||
// This should trigger 'Goto site'
|
||||
console.log("Got startup response, requesting Goto-Site (first) step fake click");
|
||||
$('#browser_steps >li:first-child .apply').click();
|
||||
browserless_seconds_remaining = data.browser_time_remaining;
|
||||
browserless_seconds_remaining = 500;
|
||||
set_first_gotosite_disabled();
|
||||
}).fail(function (data) {
|
||||
console.log(data);
|
||||
alert('There was an error communicating with the server.');
|
||||
@@ -264,7 +271,7 @@ $(document).ready(function () {
|
||||
}
|
||||
|
||||
function disable_browsersteps_ui() {
|
||||
$("#browser_steps select,input").attr('disabled', 'disabled').css('opacity', '0.5');
|
||||
set_first_gotosite_disabled();
|
||||
$("#browser-steps-ui").css('opacity', '0.3');
|
||||
$('#browsersteps-selector-canvas').off("mousemove mousedown click");
|
||||
}
|
||||
@@ -311,11 +318,20 @@ $(document).ready(function () {
|
||||
|
||||
// Add the extra buttons to the steps
|
||||
$('ul#browser_steps li').each(function (i) {
|
||||
$(this).append('<div class="control">' +
|
||||
'<a data-step-index=' + i + ' class="pure-button button-secondary button-green button-xsmall apply" >Apply</a> ' +
|
||||
'<a data-step-index=' + i + ' class="pure-button button-secondary button-xsmall clear" >Clear</a> ' +
|
||||
'<a data-step-index=' + i + ' class="pure-button button-secondary button-red button-xsmall remove" >Remove</a>' +
|
||||
'</div>')
|
||||
var s = '<div class="control">' + '<a data-step-index=' + i + ' class="pure-button button-secondary button-green button-xsmall apply" >Apply</a> ';
|
||||
if (i > 0) {
|
||||
// The first step never gets these (Goto-site)
|
||||
s += `<a data-step-index="${i}" class="pure-button button-secondary button-xsmall clear" >Clear</a> ` +
|
||||
`<a data-step-index="${i}" class="pure-button button-secondary button-red button-xsmall remove" >Remove</a>`;
|
||||
|
||||
// if a screenshot is available
|
||||
if (browser_steps_available_screenshots.includes(i.toString())) {
|
||||
var d = (browser_steps_last_error_step === i+1) ? 'before' : 'after';
|
||||
s += ` <a data-step-index="${i}" class="pure-button button-secondary button-xsmall show-screenshot" title="Show screenshot from last run" data-type="${d}">Pic</a> `;
|
||||
}
|
||||
}
|
||||
s += '</div>';
|
||||
$(this).append(s)
|
||||
}
|
||||
);
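That loop assumes two template-injected globals; the names come from the code above, while the values here are purely illustrative:

var browser_steps_available_screenshots = ["1", "2", "3"]; // step indexes that have a stored screenshot
var browser_steps_last_error_step = 3;                     // 1-based index of the step that failed on the last run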
|
||||
|
||||
@@ -352,15 +368,15 @@ $(document).ready(function () {
|
||||
|
||||
$('ul#browser_steps li .control .apply').click(function (event) {
|
||||
// sequential requests @todo refactor
|
||||
if(apply_buttons_disabled) {
|
||||
if (apply_buttons_disabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
var current_data = $(event.currentTarget).closest('li');
|
||||
$('#browser-steps-ui .loader').fadeIn();
|
||||
apply_buttons_disabled=true;
|
||||
$('ul#browser_steps li .control .apply').css('opacity',0.5);
|
||||
$("#browsersteps-img").css('opacity',0.65);
|
||||
$('#browser-steps-ui .loader .spinner').fadeIn();
|
||||
apply_buttons_disabled = true;
|
||||
$('ul#browser_steps li .control .apply').css('opacity', 0.5);
|
||||
$("#browsersteps-img").css('opacity', 0.65);
|
||||
|
||||
var is_last_step = 0;
|
||||
var step_n = $(event.currentTarget).data('step-index');
|
||||
@@ -372,17 +388,17 @@ $(document).ready(function () {
|
||||
}
|
||||
});
|
||||
|
||||
if (is_last_step == (step_n+1)) {
|
||||
if (is_last_step == (step_n + 1)) {
|
||||
is_last_step = true;
|
||||
} else {
|
||||
is_last_step = false;
|
||||
}
|
||||
|
||||
|
||||
console.log("Requesting step via POST " + $("select[id$='operation']", current_data).first().val());
|
||||
// POST the currently clicked step form widget back and await response, redraw
|
||||
$.ajax({
|
||||
method: "POST",
|
||||
url: browser_steps_sync_url+"&browsersteps_session_id="+browsersteps_session_id,
|
||||
url: browser_steps_sync_url + "&browsersteps_session_id=" + browsersteps_session_id,
|
||||
data: {
|
||||
'operation': $("select[id$='operation']", current_data).first().val(),
|
||||
'selector': $("input[id$='selector']", current_data).first().val(),
|
||||
@@ -395,32 +411,56 @@ $(document).ready(function () {
|
||||
// More than likely the CSRF token was lost when the server restarted
|
||||
alert("There was a problem processing the request, please reload the page.");
|
||||
$("#loading-status-text").hide();
|
||||
$('#browser-steps-ui .loader .spinner').fadeOut();
|
||||
},
|
||||
401: function (data) {
|
||||
// More than likely the CSRF token was lost when the server restarted
|
||||
alert(data.responseText);
|
||||
$("#loading-status-text").hide();
|
||||
$('#browser-steps-ui .loader .spinner').fadeOut();
|
||||
}
|
||||
}
|
||||
}).done(function (data) {
|
||||
// it should return the new state (selectors available and screenshot)
|
||||
xpath_data = data.xpath_data;
|
||||
$('#browsersteps-img').attr('src', data.screenshot);
|
||||
$('#browser-steps-ui .loader').fadeOut();
|
||||
apply_buttons_disabled=false;
|
||||
$("#browsersteps-img").css('opacity',1);
|
||||
$('ul#browser_steps li .control .apply').css('opacity',1);
|
||||
browserless_seconds_remaining = data.browser_time_remaining;
|
||||
$('#browser-steps-ui .loader .spinner').fadeOut();
|
||||
apply_buttons_disabled = false;
|
||||
$("#browsersteps-img").css('opacity', 1);
|
||||
$('ul#browser_steps li .control .apply').css('opacity', 1);
|
||||
$("#loading-status-text").hide();
|
||||
set_first_gotosite_disabled();
|
||||
}).fail(function (data) {
|
||||
console.log(data);
|
||||
if (data.responseText.includes("Browser session expired")) {
|
||||
disable_browsersteps_ui();
|
||||
}
|
||||
apply_buttons_disabled=false;
|
||||
apply_buttons_disabled = false;
|
||||
$("#loading-status-text").hide();
|
||||
$('ul#browser_steps li .control .apply').css('opacity',1);
|
||||
$("#browsersteps-img").css('opacity',1);
|
||||
//$('#browsersteps-selector-wrapper .loader').fadeOut(2500);
|
||||
$('ul#browser_steps li .control .apply').css('opacity', 1);
|
||||
$("#browsersteps-img").css('opacity', 1);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
$('ul#browser_steps li .control .show-screenshot').click(function (element) {
|
||||
var step_n = $(event.currentTarget).data('step-index');
|
||||
w = window.open(this.href, "_blank", "width=640,height=480");
|
||||
const t = $(event.currentTarget).data('type');
|
||||
|
||||
const url = browser_steps_fetch_screenshot_image_url + `&step_n=${step_n}&type=${t}`;
|
||||
w.document.body.innerHTML = `<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<body>
|
||||
<img src="${url}" style="width: 100%" alt="Browser Step at step ${step_n} from last run." title="Browser Step at step ${step_n} from last run."/>
|
||||
</body>
|
||||
</html>`;
|
||||
w.document.title = `Browser Step at step ${step_n} from last run.`;
|
||||
});
|
||||
|
||||
if (browser_steps_last_error_step) {
|
||||
$("ul#browser_steps>li:nth-child("+browser_steps_last_error_step+")").addClass("browser-step-with-error");
|
||||
}
|
||||
|
||||
$("ul#browser_steps select").change(function () {
|
||||
set_greyed_state();
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
$(document).ready(function () {
|
||||
var csrftoken = $('input[name=csrf_token]').val();
|
||||
$.ajaxSetup({
|
||||
beforeSend: function (xhr, settings) {
|
||||
if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
|
||||
xhr.setRequestHeader("X-CSRFToken", csrftoken)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Load it when the #screenshot tab is in use, so we dont give a slow experience when waiting for the text diff to load
|
||||
window.addEventListener('hashchange', function (e) {
|
||||
toggle(location.hash);
|
||||
@@ -13,11 +22,73 @@ $(document).ready(function () {
|
||||
} else if (hash_name === '#error-screenshot') {
|
||||
$("img#error-screenshot-img").attr('src', error_screenshot_url);
|
||||
$("#settings").hide();
|
||||
}
|
||||
|
||||
|
||||
else {
|
||||
} else if (hash_name === '#extract') {
|
||||
$("#settings").hide();
|
||||
} else {
|
||||
$("#settings").show();
|
||||
}
|
||||
}
|
||||
|
||||
const article = $('.highlightable-filter')[0];
|
||||
|
||||
// We could also add the 'touchend' event for touch devices, but since
|
||||
// most iOS/Android browsers already show a dialog when you select
|
||||
// text (often with a Share option) we'll skip that
|
||||
article.addEventListener('mouseup', dragTextHandler, false);
|
||||
article.addEventListener('mousedown', clean, false);
|
||||
|
||||
function clean(event) {
|
||||
$("#highlightSnippet").remove();
|
||||
}
|
||||
|
||||
|
||||
function dragTextHandler(event) {
|
||||
console.log('mouseupped');
|
||||
|
||||
// Check if any text was selected
|
||||
if (window.getSelection().toString().length > 0) {
|
||||
|
||||
// Find out how much (if any) user has scrolled
|
||||
var scrollTop = (window.pageYOffset !== undefined) ? window.pageYOffset : (document.documentElement || document.body.parentNode || document.body).scrollTop;
|
||||
|
||||
// Get cursor position
|
||||
const posX = event.clientX;
|
||||
const posY = event.clientY + 20 + scrollTop;
|
||||
|
||||
// Append HTML to the body, create the "Tweet Selection" dialog
|
||||
document.body.insertAdjacentHTML('beforeend', '<div id="highlightSnippet" style="position: absolute; top: ' + posY + 'px; left: ' + posX + 'px;"><div class="pure-form-message-inline" style="font-size: 70%">Ignore any change on any line which contains the selected text.</div><br><a data-mode="exact" href="javascript:void(0);" class="pure-button button-secondary button-xsmall">Ignore exact text</a> </div>');
|
||||
|
||||
if (/\d/.test(window.getSelection().toString())) {
|
||||
// Offer regex replacement
|
||||
document.getElementById("highlightSnippet").insertAdjacentHTML('beforeend', '<a data-mode="digit-regex" href="javascript:void(0);" class="pure-button button-secondary button-xsmall">Ignore text including number changes</a>');
|
||||
}
|
||||
|
||||
$('#highlightSnippet a').bind('click', function (e) {
|
||||
if(!window.getSelection().toString().trim().length) {
|
||||
alert('Oops no text selected!');
|
||||
return;
|
||||
}
|
||||
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: highlight_submit_ignore_url,
|
||||
data: {'mode': $(this).data('mode'), 'selection': window.getSelection().toString()},
|
||||
statusCode: {
|
||||
400: function () {
|
||||
// More than likely the CSRF token was lost when the server restarted
|
||||
alert("There was a problem processing the request, please reload the page.");
|
||||
}
|
||||
}
|
||||
}).done(function (data) {
|
||||
$("#highlightSnippet").html(data)
|
||||
}).fail(function (data) {
|
||||
console.log(data);
|
||||
alert('There was an error communicating with the server.');
|
||||
});
|
||||
});
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
});
|
||||
|
||||
@@ -1,112 +1,120 @@
|
||||
var a = document.getElementById('a');
|
||||
var b = document.getElementById('b');
|
||||
var result = document.getElementById('result');
|
||||
$(document).ready(function () {
|
||||
var a = document.getElementById("a");
|
||||
var b = document.getElementById("b");
|
||||
var result = document.getElementById("result");
|
||||
var inputs;
|
||||
|
||||
function changed() {
|
||||
// https://github.com/kpdecker/jsdiff/issues/389
|
||||
// I would love to use `{ignoreWhitespace: true}` here but it breaks the formatting
|
||||
options = {ignoreWhitespace: document.getElementById('ignoreWhitespace').checked};
|
||||
$('#jump-next-diff').click(function () {
|
||||
|
||||
var diff = Diff[window.diffType](a.textContent, b.textContent, options);
|
||||
var fragment = document.createDocumentFragment();
|
||||
for (var i = 0; i < diff.length; i++) {
|
||||
var element = inputs[inputs.current];
|
||||
var headerOffset = 80;
|
||||
var elementPosition = element.getBoundingClientRect().top;
|
||||
var offsetPosition = elementPosition - headerOffset + window.scrollY;
|
||||
|
||||
if (diff[i].added && diff[i + 1] && diff[i + 1].removed) {
|
||||
var swap = diff[i];
|
||||
diff[i] = diff[i + 1];
|
||||
diff[i + 1] = swap;
|
||||
window.scrollTo({
|
||||
top: offsetPosition,
|
||||
behavior: "smooth",
|
||||
});
|
||||
|
||||
inputs.current++;
|
||||
if (inputs.current >= inputs.length) {
|
||||
inputs.current = 0;
|
||||
}
|
||||
|
||||
var node;
|
||||
if (diff[i].removed) {
|
||||
node = document.createElement('del');
|
||||
node.classList.add("change");
|
||||
node.appendChild(document.createTextNode(diff[i].value));
|
||||
|
||||
} else if (diff[i].added) {
|
||||
node = document.createElement('ins');
|
||||
node.classList.add("change");
|
||||
node.appendChild(document.createTextNode(diff[i].value));
|
||||
} else {
|
||||
node = document.createTextNode(diff[i].value);
|
||||
}
|
||||
fragment.appendChild(node);
|
||||
}
|
||||
|
||||
result.textContent = '';
|
||||
result.appendChild(fragment);
|
||||
|
||||
// Jump at start
|
||||
inputs.current = 0;
|
||||
next_diff();
|
||||
}
|
||||
|
||||
window.onload = function () {
|
||||
|
||||
|
||||
/* Convert what is options from UTC time.time() to local browser time */
|
||||
var diffList = document.getElementById("diff-version");
|
||||
if (typeof (diffList) != 'undefined' && diffList != null) {
|
||||
for (var option of diffList.options) {
|
||||
var dateObject = new Date(option.value * 1000);
|
||||
option.label = dateObject.toLocaleString();
|
||||
}
|
||||
}
|
||||
|
||||
/* Set current version date as local time in the browser also */
|
||||
var current_v = document.getElementById("current-v-date");
|
||||
var dateObject = new Date(newest_version_timestamp*1000);
|
||||
current_v.innerHTML = dateObject.toLocaleString();
|
||||
onDiffTypeChange(document.querySelector('#settings [name="diff_type"]:checked'));
|
||||
changed();
|
||||
};
|
||||
|
||||
a.onpaste = a.onchange =
|
||||
b.onpaste = b.onchange = changed;
|
||||
|
||||
if ('oninput' in a) {
|
||||
a.oninput = b.oninput = changed;
|
||||
} else {
|
||||
a.onkeyup = b.onkeyup = changed;
|
||||
}
|
||||
|
||||
function onDiffTypeChange(radio) {
|
||||
window.diffType = radio.value;
|
||||
// Not necessary
|
||||
// document.title = "Diff " + radio.value.slice(4);
|
||||
}
|
||||
|
||||
var radio = document.getElementsByName('diff_type');
|
||||
for (var i = 0; i < radio.length; i++) {
|
||||
radio[i].onchange = function (e) {
|
||||
onDiffTypeChange(e.target);
|
||||
changed();
|
||||
}
|
||||
}
|
||||
|
||||
document.getElementById('ignoreWhitespace').onchange = function (e) {
|
||||
changed();
|
||||
}
|
||||
|
||||
|
||||
var inputs = document.getElementsByClassName('change');
|
||||
inputs.current = 0;
|
||||
|
||||
|
||||
function next_diff() {
|
||||
|
||||
var element = inputs[inputs.current];
|
||||
var headerOffset = 80;
|
||||
var elementPosition = element.getBoundingClientRect().top;
|
||||
var offsetPosition = elementPosition - headerOffset + window.scrollY;
|
||||
|
||||
window.scrollTo({
|
||||
top: offsetPosition,
|
||||
behavior: "smooth"
|
||||
});
|
||||
|
||||
inputs.current++;
|
||||
if (inputs.current >= inputs.length) {
|
||||
function changed() {
|
||||
// https://github.com/kpdecker/jsdiff/issues/389
|
||||
// I would love to use `{ignoreWhitespace: true}` here but it breaks the formatting
|
||||
options = {
|
||||
ignoreWhitespace: document.getElementById("ignoreWhitespace").checked,
|
||||
};
|
||||
|
||||
var diff = Diff[window.diffType](a.textContent, b.textContent, options);
|
||||
var fragment = document.createDocumentFragment();
|
||||
for (var i = 0; i < diff.length; i++) {
|
||||
if (diff[i].added && diff[i + 1] && diff[i + 1].removed) {
|
||||
var swap = diff[i];
|
||||
diff[i] = diff[i + 1];
|
||||
diff[i + 1] = swap;
|
||||
}
|
||||
|
||||
var node;
|
||||
if (diff[i].removed) {
|
||||
node = document.createElement("del");
|
||||
node.classList.add("change");
|
||||
const wrapper = node.appendChild(document.createElement("span"));
|
||||
wrapper.appendChild(document.createTextNode(diff[i].value));
|
||||
} else if (diff[i].added) {
|
||||
node = document.createElement("ins");
|
||||
node.classList.add("change");
|
||||
const wrapper = node.appendChild(document.createElement("span"));
|
||||
wrapper.appendChild(document.createTextNode(diff[i].value));
|
||||
} else {
|
||||
node = document.createTextNode(diff[i].value);
|
||||
}
|
||||
fragment.appendChild(node);
|
||||
}
|
||||
|
||||
result.textContent = "";
|
||||
result.appendChild(fragment);
|
||||
|
||||
// For nice mouse-over hover/title information
|
||||
const removed_current_option = $('#diff-version option:selected')
|
||||
if (removed_current_option) {
|
||||
$('del').each(function () {
|
||||
$(this).prop('title', 'Removed '+removed_current_option[0].label);
|
||||
});
|
||||
}
|
||||
const inserted_current_option = $('#current-version option:selected')
|
||||
if (removed_current_option) {
|
||||
$('ins').each(function () {
|
||||
$(this).prop('title', 'Inserted '+inserted_current_option[0].label);
|
||||
});
|
||||
}
|
||||
// Set the list of possible differences to jump to
|
||||
inputs = document.querySelectorAll('#diff-ui .change')
|
||||
// Set the "current" diff pointer
|
||||
inputs.current = 0;
|
||||
// Goto diff
|
||||
$('#jump-next-diff').click();
|
||||
}
|
||||
}
|
||||
|
||||
$('.needs-localtime').each(function () {
|
||||
for (var option of this.options) {
|
||||
var dateObject = new Date(option.value * 1000);
|
||||
option.label = dateObject.toLocaleString(undefined, {dateStyle: "full", timeStyle: "medium"});
|
||||
}
|
||||
})
|
||||
onDiffTypeChange(
|
||||
document.querySelector('#settings [name="diff_type"]:checked'),
|
||||
);
|
||||
changed();
|
||||
|
||||
a.onpaste = a.onchange = b.onpaste = b.onchange = changed;
|
||||
|
||||
if ("oninput" in a) {
|
||||
a.oninput = b.oninput = changed;
|
||||
} else {
|
||||
a.onkeyup = b.onkeyup = changed;
|
||||
}
|
||||
|
||||
function onDiffTypeChange(radio) {
|
||||
window.diffType = radio.value;
|
||||
// Not necessary
|
||||
// document.title = "Diff " + radio.value.slice(4);
|
||||
}
|
||||
|
||||
var radio = document.getElementsByName("diff_type");
|
||||
for (var i = 0; i < radio.length; i++) {
|
||||
radio[i].onchange = function (e) {
|
||||
onDiffTypeChange(e.target);
|
||||
changed();
|
||||
};
|
||||
}
|
||||
|
||||
document.getElementById("ignoreWhitespace").onchange = function (e) {
|
||||
changed();
|
||||
};
|
||||
|
||||
});
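changed() dispatches through Diff[window.diffType], so the diff_type radio values are expected to be jsdiff function names — a minimal standalone sketch of that call pattern (the sample strings are invented):

// Assumes the jsdiff library is loaded and exposed globally as Diff.
var diffType = "diffWords"; // e.g. taken from the checked [name="diff_type"] radio
var parts = Diff[diffType]("price: 10 EUR", "price: 12 EUR", {ignoreWhitespace: false});
parts.forEach(function (part) {
    // part.added / part.removed flag what changed; part.value is the text fragment
    console.log(part.added ? "+" : part.removed ? "-" : " ", part.value);
});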
|
||||
|
||||
|
||||
@@ -32,5 +32,10 @@ $(document).ready(function () {
|
||||
window.getSelection().removeAllRanges();
|
||||
|
||||
});
|
||||
|
||||
$("#notification-token-toggle").click(function (e) {
|
||||
e.preventDefault();
|
||||
$('#notification-tokens-info').toggle();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -26,9 +26,6 @@ $(document).ready(function() {
|
||||
data = {
|
||||
window_url : window.location.href,
|
||||
notification_urls : $('.notification-urls').val(),
|
||||
notification_title : $('.notification-title').val(),
|
||||
notification_body : $('.notification-body').val(),
|
||||
notification_format : $('.notification-format').val(),
|
||||
}
|
||||
for (key in data) {
|
||||
if (!data[key].length) {
|
||||
|
||||
changedetectionio/static/js/recheck-proxy.js (new file, 87 lines)
@@ -0,0 +1,87 @@
|
||||
$(function () {
|
||||
/* add container before each proxy location to show status */
|
||||
|
||||
var option_li = $('.fetch-backend-proxy li').filter(function() {
|
||||
return $("input",this)[0].value.length >0;
|
||||
});
|
||||
|
||||
//var option_li = $('.fetch-backend-proxy li');
|
||||
var isActive = false;
|
||||
$(option_li).prepend('<div class="proxy-status"></div>');
|
||||
$(option_li).append('<div class="proxy-timing"></div><div class="proxy-check-details"></div>');
|
||||
|
||||
function set_proxy_check_status(proxy_key, state) {
|
||||
// select input by value name
|
||||
const proxy_li = $('input[value="' + proxy_key + '" ]').parent();
|
||||
if (state['status'] === 'RUNNING') {
|
||||
$('.proxy-status', proxy_li).html('<span class="spinner"></span>');
|
||||
}
|
||||
if (state['status'] === 'OK') {
|
||||
$('.proxy-status', proxy_li).html('<span style="color: green; font-weight: bold" >OK</span>');
|
||||
$('.proxy-check-details', proxy_li).html(state['text']);
|
||||
}
|
||||
if (state['status'] === 'ERROR' || state['status'] === 'ERROR OTHER') {
|
||||
$('.proxy-status', proxy_li).html('<span style="color: red; font-weight: bold" >X</span>');
|
||||
$('.proxy-check-details', proxy_li).html(state['text']);
|
||||
}
|
||||
$('.proxy-timing', proxy_li).html(state['time']);
|
||||
}
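set_proxy_check_status() implies the status endpoint returns a map of proxy keys to {status, text, time} objects — an illustrative payload shape only (keys and values here are invented):

// Keys correspond to the value="" of each radio input inside .fetch-backend-proxy.
var example_proxy_status_response = {
    "proxy-one": {"status": "OK",      "text": "200 OK via proxy-one", "time": "1.2s"},
    "proxy-two": {"status": "RUNNING", "text": "",                     "time": ""},
    "bad-proxy": {"status": "ERROR",   "text": "Connection refused",   "time": "5.2s"}
};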
|
||||
|
||||
|
||||
function pollServer() {
|
||||
if (isActive) {
|
||||
window.setTimeout(function () {
|
||||
$.ajax({
|
||||
url: proxy_recheck_status_url,
|
||||
success: function (data) {
|
||||
var all_done = true;
|
||||
$.each(data, function (proxy_key, state) {
|
||||
set_proxy_check_status(proxy_key, state);
|
||||
if (state['status'] === 'RUNNING') {
|
||||
all_done = false;
|
||||
}
|
||||
});
|
||||
|
||||
if (all_done) {
|
||||
console.log("Shutting down poller, all done.")
|
||||
isActive = false;
|
||||
} else {
|
||||
pollServer();
|
||||
}
|
||||
},
|
||||
error: function () {
|
||||
//ERROR HANDLING
|
||||
pollServer();
|
||||
}
|
||||
});
|
||||
}, 2000);
|
||||
}
|
||||
}
|
||||
|
||||
$('#check-all-proxies').click(function (e) {
|
||||
e.preventDefault()
|
||||
$('body').addClass('proxy-check-active');
|
||||
$('.proxy-check-details').html('');
|
||||
$('.proxy-status').html('<span class="spinner"></span>').fadeIn();
|
||||
$('.proxy-timing').html('');
|
||||
|
||||
// Request start, needs CSRF?
|
||||
$.ajax({
|
||||
type: "GET",
|
||||
url: recheck_proxy_start_url,
|
||||
}).done(function (data) {
|
||||
$.each(data, function (proxy_key, state) {
|
||||
set_proxy_check_status(proxy_key, state['status'])
|
||||
});
|
||||
isActive = true;
|
||||
pollServer();
|
||||
|
||||
}).fail(function (data) {
|
||||
console.log(data);
|
||||
alert('There was an error communicating with the server.');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
@@ -12,7 +12,7 @@ window.addEventListener('hashchange', function () {
|
||||
var has_errors = document.querySelectorAll(".messages .error");
|
||||
if (!has_errors.length) {
|
||||
if (document.location.hash == "") {
|
||||
document.querySelector(".tabs ul li:first-child a").click();
|
||||
location.replace(document.querySelector(".tabs ul li:first-child a").hash);
|
||||
} else {
|
||||
set_active_tab();
|
||||
}
|
||||
|
||||
changedetectionio/static/js/toggle-theme.js (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
/**
|
||||
* @file
|
||||
* Toggles theme between light and dark mode.
|
||||
*/
|
||||
$(document).ready(function () {
|
||||
const button = document.getElementById("toggle-light-mode");
|
||||
|
||||
button.onclick = () => {
|
||||
const htmlElement = document.getElementsByTagName("html");
|
||||
const isDarkMode = htmlElement[0].dataset.darkmode === "true";
|
||||
htmlElement[0].dataset.darkmode = !isDarkMode;
|
||||
setCookieValue(!isDarkMode);
|
||||
};
|
||||
|
||||
const setCookieValue = (value) => {
|
||||
document.cookie = `css_dark_mode=${value};max-age=31536000;path=/`
|
||||
}
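The css_dark_mode cookie is presumably read back when the page is rendered; a purely illustrative client-side sketch of the inverse operation (the real template may well set data-darkmode server-side instead):

// Illustrative only: restore html[data-darkmode] from the cookie written above.
const match = document.cookie.match(/(?:^|;\s*)css_dark_mode=([^;]+)/);
if (match) {
    document.getElementsByTagName("html")[0].dataset.darkmode = match[1];
}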
|
||||
|
||||
// Search input box behaviour
|
||||
const toggle_search = document.getElementById("toggle-search");
|
||||
const search_q = document.getElementById("search-q");
|
||||
window.addEventListener('keydown', function (e) {
|
||||
|
||||
if (e.altKey == true && e.keyCode == 83)
|
||||
search_q.classList.toggle('expanded');
|
||||
search_q.focus();
|
||||
});
|
||||
|
||||
|
||||
search_q.onkeydown = (e) => {
|
||||
var key = e.keyCode || e.which;
|
||||
if (key === 13) {
|
||||
document.searchForm.submit();
|
||||
}
|
||||
};
|
||||
toggle_search.onclick = () => {
|
||||
// Could be that they want to search something once text is in there
|
||||
if (search_q.value.length) {
|
||||
document.searchForm.submit();
|
||||
} else {
|
||||
// If not..
|
||||
search_q.classList.toggle('expanded');
|
||||
search_q.focus();
|
||||
}
|
||||
};
|
||||
|
||||
$('#heart-us').click(function () {
|
||||
$("#overlay").toggleClass('visible');
|
||||
heartpath.style.fill = document.getElementById("overlay").classList.contains("visible") ? '#ff0000' : 'var(--color-background)';
|
||||
});
|
||||
|
||||
});
|
||||
@@ -1,4 +1,5 @@
|
||||
// Horrible proof of concept code :)
|
||||
// Copyright (C) 2021 Leigh Morresi (dgtlmoon@gmail.com)
|
||||
// All rights reserved.
|
||||
// yes - this is really a hack, if you are a front-ender and want to help, please get in touch!
|
||||
|
||||
$(document).ready(function () {
|
||||
@@ -60,7 +61,12 @@ $(document).ready(function () {
|
||||
function bootstrap_visualselector() {
|
||||
if (1) {
|
||||
// bootstrap it, this will trigger everything else
|
||||
$("img#selector-background").bind('load', function () {
|
||||
$("img#selector-background").on("error", function () {
|
||||
$('.fetching-update-notice').html("<strong>Ooops!</strong> The VisualSelector tool needs atleast one fetched page, please unpause the watch and/or wait for the watch to complete fetching and then reload this page.");
|
||||
$('.fetching-update-notice').css('color','#bb0000');
|
||||
$('#selector-current-xpath').hide();
|
||||
$('#clear-selector').hide();
|
||||
}).bind('load', function () {
|
||||
console.log("Loaded background...");
|
||||
c = document.getElementById("selector-canvas");
|
||||
// greyed out fill context
|
||||
@@ -78,10 +84,11 @@ $(document).ready(function () {
|
||||
}).attr("src", screenshot_url);
|
||||
}
|
||||
// Tell visualSelector that the image should update
|
||||
var s = $("img#selector-background").attr('src')+"?"+ new Date().getTime();
|
||||
$("img#selector-background").attr('src',s)
|
||||
var s = $("img#selector-background").attr('src') + "?" + new Date().getTime();
|
||||
$("img#selector-background").attr('src', s)
|
||||
}
|
||||
|
||||
// This is fired once the img src is loaded in bootstrap_visualselector()
|
||||
function fetch_data() {
|
||||
// Image is ready
|
||||
$('.fetching-update-notice').html("Fetching element data..");
|
||||
@@ -98,7 +105,8 @@ $(document).ready(function () {
|
||||
reflow_selector();
|
||||
$('.fetching-update-notice').fadeOut();
|
||||
});
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
|
||||
function set_scale() {
|
||||
@@ -177,9 +185,10 @@ $(document).ready(function () {
|
||||
// Basically, find the deepest (most nested) match
|
||||
var found = 0;
|
||||
ctx.fillStyle = 'rgba(205,0,0,0.35)';
|
||||
for (var i = selector_data['size_pos'].length; i !== 0; i--) {
|
||||
// Will be sorted by smallest width*height first
|
||||
for (var i = 0; i < selector_data['size_pos'].length; i++) {
|
||||
// draw all of them? let them choose somehow?
|
||||
var sel = selector_data['size_pos'][i - 1];
|
||||
var sel = selector_data['size_pos'][i];
|
||||
// If we are in a bounding-box
|
||||
if (e.offsetY > sel.top * y_scale && e.offsetY < sel.top * y_scale + sel.height * y_scale
|
||||
&&
|
||||
@@ -195,7 +204,7 @@ $(document).ready(function () {
|
||||
// no need to keep digging
|
||||
// @todo or, O to go out/up, I to go in
|
||||
// or double click to go up/out the selector?
|
||||
current_selected_i = i - 1;
|
||||
current_selected_i = i;
|
||||
found += 1;
|
||||
break;
|
||||
}
|
||||
|
||||
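The hunk above reverses the hit-test loop: the bounding boxes arrive sorted by smallest width*height first, so the scan now walks them in order and stops at the first box containing the click, i.e. the deepest element. As a standalone illustration only (the function name and the left/width fields are assumptions for this sketch; the hunk itself is cut off before the x-axis check), the logic is roughly:

// Sketch only: return the index of the first (smallest) bounding box that
// contains the click position, given boxes sorted by width*height ascending.
function find_deepest_selector(size_pos, offsetX, offsetY, x_scale, y_scale) {
  for (var i = 0; i < size_pos.length; i++) {
    var sel = size_pos[i];
    var inY = offsetY > sel.top * y_scale && offsetY < (sel.top + sel.height) * y_scale;
    var inX = offsetX > sel.left * x_scale && offsetX < (sel.left + sel.width) * x_scale;
    if (inX && inY) {
      return i; // smallest match wins, no need to keep digging
    }
  }
  return -1; // nothing under the cursor
}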
@@ -1,31 +1,53 @@
|
||||
$(function () {
|
||||
// Remove unviewed status when normally clicked
|
||||
$('.diff-link').click(function () {
|
||||
$(this).closest('.unviewed').removeClass('unviewed');
|
||||
});
|
||||
// Remove unviewed status when normally clicked
|
||||
$('.diff-link').click(function () {
|
||||
$(this).closest('.unviewed').removeClass('unviewed');
|
||||
});
|
||||
|
||||
$('td[data-timestamp]').each(function () {
|
||||
$(this).prop('title', new Intl.DateTimeFormat(undefined,
|
||||
{
|
||||
dateStyle: 'full',
|
||||
timeStyle: 'long'
|
||||
}).format($(this).data('timestamp') * 1000));
|
||||
})
|
||||
|
||||
$('.with-share-link > *').click(function () {
|
||||
$("#copied-clipboard").remove();
|
||||
$("#checkbox-assign-tag").click(function (e) {
|
||||
$('#op_extradata').val(prompt("Enter a tag name"));
|
||||
});
|
||||
|
||||
var range = document.createRange();
|
||||
var n=$("#share-link")[0];
|
||||
range.selectNode(n);
|
||||
window.getSelection().removeAllRanges();
|
||||
window.getSelection().addRange(range);
|
||||
document.execCommand("copy");
|
||||
window.getSelection().removeAllRanges();
|
||||
$('.with-share-link > *').click(function () {
|
||||
$("#copied-clipboard").remove();
|
||||
|
||||
$('.with-share-link').append('<span style="font-size: 80%; color: #fff;" id="copied-clipboard">Copied to clipboard</span>');
|
||||
$("#copied-clipboard").fadeOut(2500, function() {
|
||||
$(this).remove();
|
||||
});
|
||||
});
|
||||
var range = document.createRange();
|
||||
var n = $("#share-link")[0];
|
||||
range.selectNode(n);
|
||||
window.getSelection().removeAllRanges();
|
||||
window.getSelection().addRange(range);
|
||||
document.execCommand("copy");
|
||||
window.getSelection().removeAllRanges();
|
||||
|
||||
$('.with-share-link').append('<span style="font-size: 80%; color: #fff;" id="copied-clipboard">Copied to clipboard</span>');
|
||||
$("#copied-clipboard").fadeOut(2500, function () {
|
||||
$(this).remove();
|
||||
});
|
||||
});
|
||||
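// Side note (sketch only, not the project's code): the handler above copies the
// share link by selecting the #share-link node and calling document.execCommand("copy"),
// which works but is deprecated. The same behaviour with the asynchronous Clipboard API
// would look roughly like this; it assumes the same #share-link element and requires
// a secure (HTTPS) context.
$('.with-share-link > *').click(function () {
  var text = $("#share-link").text();
  navigator.clipboard.writeText(text).then(function () {
    $("#copied-clipboard").remove();
    $('.with-share-link').append('<span style="font-size: 80%; color: #fff;" id="copied-clipboard">Copied to clipboard</span>');
    $("#copied-clipboard").fadeOut(2500, function () {
      $(this).remove();
    });
  });
});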
|
||||
$(".watch-table tr").click(function (event) {
|
||||
var tagName = event.target.tagName.toLowerCase();
|
||||
if (tagName === 'tr' || tagName === 'td') {
|
||||
var x = $('input[type=checkbox]', this);
|
||||
if (x) {
|
||||
$(x).click();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// checkboxes - check all
|
||||
$("#check-all").click(function (e) {
|
||||
$('input[type=checkbox]').not(this).prop('checked', this.checked);
|
||||
});
|
||||
|
||||
// checkboxes - show/hide buttons
|
||||
$("input[type=checkbox]").click(function (e) {
|
||||
if ($('input[type=checkbox]:checked').length) {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
$(document).ready(function() {
|
||||
$(document).ready(function () {
|
||||
function toggle() {
|
||||
if ($('input[name="fetch_backend"]:checked').val() == 'html_webdriver') {
|
||||
if(playwright_enabled) {
|
||||
if (playwright_enabled) {
|
||||
// playwright supports headers, so hide everything else
|
||||
// See #664
|
||||
$('#requests-override-options #request-method').hide();
|
||||
@@ -14,9 +14,14 @@ $(document).ready(function() {
|
||||
$('#requests-override-options').hide();
|
||||
}
|
||||
|
||||
|
||||
$('#webdriver-override-options').show();
|
||||
|
||||
} else if ($('input[name="fetch_backend"]:checked').val() == 'system') {
|
||||
$('#requests-override-options #request-method').hide();
|
||||
$('#requests-override-options #request-body').hide();
|
||||
$('#ignore-status-codes-option').hide();
|
||||
$('#requests-override-options').hide();
|
||||
$('#webdriver-override-options').hide();
|
||||
} else {
|
||||
|
||||
$('#requests-override-options').show();
|
||||
@@ -37,4 +42,8 @@ $(document).ready(function() {
|
||||
$('#notification_urls').val('');
|
||||
e.preventDefault();
|
||||
});
|
||||
$("#notification-token-toggle").click(function (e) {
|
||||
e.preventDefault();
|
||||
$('#notification-tokens-info').toggle();
|
||||
});
|
||||
});
|
||||
|
||||
3
changedetectionio/static/styles/.dockerignore
Normal file
@@ -0,0 +1,3 @@
|
||||
node_modules
|
||||
package-lock.json
|
||||
|
||||
@@ -1,10 +1,148 @@
|
||||
/**
|
||||
* CSS custom properties (aka variables).
|
||||
*/
|
||||
:root {
|
||||
--color-white: #fff;
|
||||
--color-grey-50: #111;
|
||||
--color-grey-100: #262626;
|
||||
--color-grey-200: #333;
|
||||
--color-grey-300: #444;
|
||||
--color-grey-325: #555;
|
||||
--color-grey-350: #565d64;
|
||||
--color-grey-400: #666;
|
||||
--color-grey-500: #777;
|
||||
--color-grey-600: #999;
|
||||
--color-grey-700: #cbcbcb;
|
||||
--color-grey-750: #ddd;
|
||||
--color-grey-800: #e0e0e0;
|
||||
--color-grey-850: #eee;
|
||||
--color-grey-900: #f2f2f2;
|
||||
--color-black: #000;
|
||||
--color-dark-red: #a00;
|
||||
--color-light-red: #dd0000;
|
||||
--color-background-page: var(--color-grey-100);
|
||||
--color-background-gradient-first: #5ad8f7;
|
||||
--color-background-gradient-second: #2f50af;
|
||||
--color-background-gradient-third: #9150bf;
|
||||
--color-background: var(--color-white);
|
||||
--color-text: var(--color-grey-200);
|
||||
--color-link: #1b98f8;
|
||||
--color-menu-accent: #ed5900;
|
||||
--color-background-code: var(--color-grey-850);
|
||||
--color-error: var(--color-dark-red);
|
||||
--color-error-input: #ffebeb;
|
||||
--color-error-list: var(--color-light-red);
|
||||
--color-table-background: var(--color-background);
|
||||
--color-table-stripe: var(--color-grey-900);
|
||||
--color-text-tab: var(--color-white);
|
||||
--color-background-tab: rgba(255, 255, 255, 0.2);
|
||||
--color-background-tab-hover: rgba(255, 255, 255, 0.5);
|
||||
--color-text-tab-active: #222;
|
||||
--color-api-key: #0078e7;
|
||||
--color-background-button-primary: #0078e7;
|
||||
--color-background-button-green: #42dd53;
|
||||
--color-background-button-red: #dd4242;
|
||||
--color-background-button-success: rgb(28, 184, 65);
|
||||
--color-background-button-error: rgb(202, 60, 60);
|
||||
--color-text-button-error: var(--color-white);
|
||||
--color-background-button-warning: rgb(202, 60, 60);
|
||||
--color-text-button-warning: var(--color-white);
|
||||
--color-background-button-secondary: rgb(66, 184, 221);
|
||||
--color-background-button-cancel: rgb(200, 200, 200);
|
||||
--color-text-button: var(--color-white);
|
||||
--color-background-button-tag: rgb(99, 99, 99);
|
||||
--color-background-snapshot-age: #dfdfdf;
|
||||
--color-error-text-snapshot-age: var(--color-white);
|
||||
--color-error-background-snapshot-age: #ff0000;
|
||||
--color-background-button-tag-active: #9c9c9c;
|
||||
--color-text-messages: var(--color-white);
|
||||
--color-background-messages-message: rgba(255, 255, 255, .2);
|
||||
--color-background-messages-error: rgba(255, 1, 1, .5);
|
||||
--color-background-messages-notice: rgba(255, 255, 255, .5);
|
||||
--color-border-notification: #ccc;
|
||||
--color-background-checkbox-operations: rgba(0, 0, 0, 0.05);
|
||||
--color-warning: #ff3300;
|
||||
--color-border-warning: var(--color-warning);
|
||||
--color-text-legend: var(--color-white);
|
||||
--color-link-new-version: #e07171;
|
||||
--color-last-checked: #bbb;
|
||||
--color-text-footer: #444;
|
||||
--color-border-watch-table-cell: #eee;
|
||||
--color-text-watch-tag-list: #e70069;
|
||||
--color-background-new-watch-form: rgba(0, 0, 0, 0.05);
|
||||
--color-background-new-watch-input: var(--color-white);
|
||||
--color-text-new-watch-input: var(--color-text);
|
||||
--color-border-input: var(--color-grey-500);
|
||||
--color-shadow-input: var(--color-grey-400);
|
||||
--color-background-input: var(--color-white);
|
||||
--color-text-input: var(--color-text);
|
||||
--color-text-input-description: var(--color-grey-500);
|
||||
--color-text-input-placeholder: var(--color-grey-600);
|
||||
--color-background-table-thead: var(--color-grey-800);
|
||||
--color-border-table-cell: var(--color-grey-700);
|
||||
--color-text-menu-heading: var(--color-grey-350);
|
||||
--color-text-menu-link: var(--color-grey-500);
|
||||
--color-background-menu-link-hover: var(--color-grey-850);
|
||||
--color-text-menu-link-hover: var(--color-grey-300);
|
||||
--color-shadow-jump: var(--color-grey-500);
|
||||
--color-icon-github: var(--color-black);
|
||||
--color-icon-github-hover: var(--color-grey-300);
|
||||
--color-watch-table-error: var(--color-dark-red);
|
||||
--color-watch-table-row-text: var(--color-grey-100); }
|
||||
|
||||
html[data-darkmode="true"] {
|
||||
--color-link: #59bdfb;
|
||||
--color-text: var(--color-white);
|
||||
--color-background-gradient-first: #3f90a5;
|
||||
--color-background-gradient-second: #1e316c;
|
||||
--color-background-gradient-third: #4d2c64;
|
||||
--color-background-new-watch-input: var(--color-grey-100);
|
||||
--color-text-new-watch-input: var(--color-text);
|
||||
--color-background-table-thead: var(--color-grey-200);
|
||||
--color-table-background: var(--color-grey-300);
|
||||
--color-table-stripe: var(--color-grey-325);
|
||||
--color-background: var(--color-grey-300);
|
||||
--color-text-menu-heading: var(--color-grey-850);
|
||||
--color-text-menu-link: var(--color-grey-800);
|
||||
--color-border-table-cell: var(--color-grey-400);
|
||||
--color-text-tab-active: var(--color-text);
|
||||
--color-border-input: var(--color-grey-400);
|
||||
--color-shadow-input: var(--color-grey-50);
|
||||
--color-background-input: var(--color-grey-350);
|
||||
--color-text-input-description: var(--color-grey-600);
|
||||
--color-text-input-placeholder: var(--color-grey-600);
|
||||
--color-text-watch-tag-list: #fa3e92;
|
||||
--color-background-code: var(--color-grey-200);
|
||||
--color-background-tab: rgba(0, 0, 0, 0.2);
|
||||
--color-background-tab-hover: rgba(0, 0, 0, 0.5);
|
||||
--color-background-snapshot-age: var(--color-grey-200);
|
||||
--color-shadow-jump: var(--color-grey-200);
|
||||
--color-icon-github: var(--color-white);
|
||||
--color-icon-github-hover: var(--color-grey-700);
|
||||
--color-watch-table-error: var(--color-light-red);
|
||||
--color-watch-table-row-text: var(--color-grey-800); }
|
||||
html[data-darkmode="true"] .icon-spread {
|
||||
filter: hue-rotate(-10deg) brightness(1.5); }
|
||||
html[data-darkmode="true"] .watch-table .title-col a[target="_blank"]::after,
|
||||
html[data-darkmode="true"] .watch-table .current-diff-url::after {
|
||||
filter: invert(0.5) hue-rotate(10deg) brightness(2); }
|
||||
html[data-darkmode="true"] .watch-table .watch-controls .state-off img {
|
||||
opacity: 0.3; }
|
||||
html[data-darkmode="true"] .watch-table .watch-controls .state-on img {
|
||||
opacity: 1.0; }
|
||||
html[data-darkmode="true"] .watch-table .unviewed {
|
||||
color: #fff; }
|
||||
html[data-darkmode="true"] .watch-table .unviewed.error {
|
||||
color: var(--color-watch-table-error); }
|
||||
|
||||
#diff-ui {
|
||||
background: #fff;
|
||||
background: var(--color-background);
|
||||
padding: 2em;
|
||||
margin-left: 1em;
|
||||
margin-right: 1em;
|
||||
border-radius: 5px;
|
||||
font-size: 11px; }
|
||||
border-radius: 5px; }
|
||||
#diff-ui #text {
|
||||
font-size: 11px; }
|
||||
#diff-ui table {
|
||||
table-layout: fixed;
|
||||
width: 100%; }
|
||||
@@ -45,6 +183,14 @@ ins {
|
||||
margin-left: 1em;
|
||||
display: inline-block;
|
||||
font-weight: normal; }
|
||||
#settings del {
|
||||
padding: 0.5em; }
|
||||
#settings ins {
|
||||
padding: 0.5em; }
|
||||
#settings option:checked {
|
||||
font-weight: bold; }
|
||||
#settings [type=radio], #settings [type=checkbox] {
|
||||
vertical-align: middle; }
|
||||
|
||||
.source {
|
||||
position: absolute;
|
||||
@@ -76,3 +222,10 @@ td#diff-col div {
|
||||
text-align: center; }
|
||||
.tab-pane-inner#screenshot img {
|
||||
max-width: 99%; }
|
||||
|
||||
#highlightSnippet {
|
||||
background: var(--color-background);
|
||||
padding: 1em;
|
||||
border-radius: 5px;
|
||||
background: var(--color-background);
|
||||
box-shadow: 1px 1px 4px var(--color-shadow-jump); }
|
||||
|
||||
@@ -1,96 +0,0 @@
|
||||
#diff-ui {
|
||||
|
||||
background: #fff;
|
||||
padding: 2em;
|
||||
margin-left: 1em;
|
||||
margin-right: 1em;
|
||||
border-radius: 5px;
|
||||
font-size: 11px;
|
||||
|
||||
table {
|
||||
table-layout: fixed;
|
||||
width: 100%;
|
||||
}
|
||||
td {
|
||||
padding: 3px 4px;
|
||||
border: 1px solid transparent;
|
||||
vertical-align: top;
|
||||
font: 1em monospace;
|
||||
text-align: left;
|
||||
}
|
||||
pre {
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
}
|
||||
h1 {
|
||||
display: inline;
|
||||
font-size: 100%;
|
||||
}
|
||||
del {
|
||||
text-decoration: none;
|
||||
color: #b30000;
|
||||
background: #fadad7;
|
||||
}
|
||||
|
||||
ins {
|
||||
background: #eaf2c2;
|
||||
color: #406619;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
#result {
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
|
||||
#settings {
|
||||
background: rgba(0,0,0,.05);
|
||||
padding: 1em;
|
||||
border-radius: 10px;
|
||||
margin-bottom: 1em;
|
||||
color: #fff;
|
||||
font-size: 80%;
|
||||
label {
|
||||
margin-left: 1em;
|
||||
display: inline-block;
|
||||
font-weight: normal;
|
||||
}
|
||||
}
|
||||
|
||||
.source {
|
||||
position: absolute;
|
||||
right: 1%;
|
||||
top: .2em;
|
||||
}
|
||||
|
||||
@-moz-document url-prefix() {
|
||||
body {
|
||||
height: 99%; /* Hide scroll bar in Firefox */
|
||||
}
|
||||
}
|
||||
|
||||
td#diff-col div {
|
||||
text-align: justify;
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
|
||||
.ignored {
|
||||
background-color: #ccc;
|
||||
/* border: #0d91fa 1px solid; */
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
.triggered {
|
||||
background-color: #1b98f8;
|
||||
}
|
||||
|
||||
/* ignored and triggered? make it obvious error */
|
||||
.ignored.triggered {
|
||||
background-color: #ff0000;
|
||||
}
|
||||
|
||||
.tab-pane-inner#screenshot {
|
||||
text-align: center;
|
||||
img {
|
||||
max-width: 99%;
|
||||
}
|
||||
}
|
||||
@@ -4,7 +4,8 @@
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"build": "node-sass styles.scss -o .;node-sass diff.scss -o ."
|
||||
"watch": "node-sass -w scss -o .",
|
||||
"build": "node-sass scss -o ."
|
||||
},
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
|
||||
136
changedetectionio/static/styles/scss/diff.scss
Normal file
@@ -0,0 +1,136 @@
|
||||
@import "parts/_variables.scss";
|
||||
|
||||
#diff-ui {
|
||||
|
||||
background: var(--color-background);
|
||||
padding: 2em;
|
||||
margin-left: 1em;
|
||||
margin-right: 1em;
|
||||
border-radius: 5px;
|
||||
|
||||
// The first tab 'text' diff
|
||||
#text {
|
||||
font-size: 11px;
|
||||
}
|
||||
|
||||
table {
|
||||
table-layout: fixed;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
td {
|
||||
padding: 3px 4px;
|
||||
border: 1px solid transparent;
|
||||
vertical-align: top;
|
||||
font: 1em monospace;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
pre {
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
}
|
||||
|
||||
h1 {
|
||||
display: inline;
|
||||
font-size: 100%;
|
||||
}
|
||||
|
||||
del {
|
||||
text-decoration: none;
|
||||
color: #b30000;
|
||||
background: #fadad7;
|
||||
}
|
||||
|
||||
ins {
|
||||
background: #eaf2c2;
|
||||
color: #406619;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
#result {
|
||||
white-space: pre-wrap;
|
||||
|
||||
.change {
|
||||
span {}
|
||||
}
|
||||
}
|
||||
|
||||
#settings {
|
||||
background: rgba(0, 0, 0, .05);
|
||||
padding: 1em;
|
||||
border-radius: 10px;
|
||||
margin-bottom: 1em;
|
||||
color: #fff;
|
||||
font-size: 80%;
|
||||
|
||||
label {
|
||||
margin-left: 1em;
|
||||
display: inline-block;
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
del {
|
||||
padding: 0.5em;
|
||||
}
|
||||
|
||||
ins {
|
||||
padding: 0.5em;
|
||||
}
|
||||
|
||||
option:checked {
|
||||
font-weight: bold;
|
||||
}
|
||||
[type=radio],[type=checkbox] {
|
||||
vertical-align: middle;
|
||||
}
|
||||
}
|
||||
|
||||
.source {
|
||||
position: absolute;
|
||||
right: 1%;
|
||||
top: .2em;
|
||||
}
|
||||
|
||||
@-moz-document url-prefix() {
|
||||
body {
|
||||
height: 99%;
|
||||
/* Hide scroll bar in Firefox */
|
||||
}
|
||||
}
|
||||
|
||||
td#diff-col div {
|
||||
text-align: justify;
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
|
||||
.ignored {
|
||||
background-color: #ccc;
|
||||
/* border: #0d91fa 1px solid; */
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
.triggered {
|
||||
background-color: #1b98f8;
|
||||
}
|
||||
|
||||
/* ignored and triggered? make it obvious error */
|
||||
.ignored.triggered {
|
||||
background-color: #ff0000;
|
||||
}
|
||||
|
||||
.tab-pane-inner#screenshot {
|
||||
text-align: center;
|
||||
|
||||
img {
|
||||
max-width: 99%;
|
||||
}
|
||||
}
|
||||
|
||||
#highlightSnippet {
|
||||
background: var(--color-background);
|
||||
padding: 1em;
|
||||
border-radius: 5px;
|
||||
background: var(--color-background);
|
||||
box-shadow: 1px 1px 4px var(--color-shadow-jump);
|
||||
}
|
||||
@@ -6,6 +6,15 @@
|
||||
}
|
||||
|
||||
li {
|
||||
&.browser-step-with-error {
|
||||
background-color: #ffd6d6;
|
||||
border-radius: 4px;
|
||||
}
|
||||
&:not(:first-child) {
|
||||
&:hover {
|
||||
opacity: 1.0;
|
||||
}
|
||||
}
|
||||
list-style: decimal;
|
||||
padding: 5px;
|
||||
.control {
|
||||
@@ -39,7 +48,7 @@
|
||||
#browser-steps .flex-wrapper {
|
||||
display: flex;
|
||||
flex-flow: row;
|
||||
height: 600px; /*@todo make this dynamic */
|
||||
height: 70vh;
|
||||
}
|
||||
|
||||
/* this is duplicate :( */
|
||||
@@ -70,6 +79,8 @@
|
||||
transform: translate(-50%, -50%);
|
||||
margin-left: -40px;
|
||||
z-index: 100;
|
||||
max-width: 350px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
/* nice tall skinny one */
|
||||
@@ -78,4 +89,11 @@
|
||||
height: 80px;
|
||||
font-size: 3px;
|
||||
}
|
||||
|
||||
#browsersteps-click-start {
|
||||
&:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
color: var(--color-grey-400);
|
||||
}
|
||||
}
|
||||
25
changedetectionio/static/styles/scss/parts/_darkmode.scss
Normal file
@@ -0,0 +1,25 @@
#toggle-light-mode {
  /* width: 3rem;*/

  /* default */
  .icon-dark {
    display: none;
  }
}

html[data-darkmode="true"] {
  #toggle-light-mode {
    .icon-light {
      display: none;
    }

    .icon-dark {
      display: block;
    }
  }
}
@@ -0,0 +1,62 @@
|
||||
ul#requests-extra_proxies {
|
||||
list-style: none;
|
||||
/* tidy up the table to look more "inline" */
|
||||
li {
|
||||
> label {
|
||||
display: none;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/* each proxy entry is a `table` */
|
||||
table {
|
||||
tr {
|
||||
display: inline;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#request {
|
||||
/* Auto proxy scan/checker */
|
||||
label[for=proxy] {
|
||||
display: inline-block;
|
||||
}
|
||||
}
|
||||
|
||||
body.proxy-check-active {
|
||||
#request {
|
||||
.proxy-status {
|
||||
width: 2em;
|
||||
}
|
||||
|
||||
.proxy-check-details {
|
||||
font-size: 80%;
|
||||
color: #555;
|
||||
display: block;
|
||||
padding-left: 4em;
|
||||
}
|
||||
|
||||
.proxy-timing {
|
||||
font-size: 80%;
|
||||
padding-left: 1rem;
|
||||
color: var(--color-link);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#recommended-proxy {
|
||||
display: grid;
|
||||
gap: 2rem;
|
||||
@media (min-width: 991px) {
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
}
|
||||
|
||||
> div {
|
||||
border: 1px #aaa solid;
|
||||
border-radius: 4px;
|
||||
padding: 1em;
|
||||
}
|
||||
|
||||
padding-bottom: 1em;
|
||||
}
|
||||
38
changedetectionio/static/styles/scss/parts/_love.scss
Normal file
@@ -0,0 +1,38 @@
|
||||
#overlay {
|
||||
|
||||
opacity: 0.95;
|
||||
position: fixed;
|
||||
|
||||
width: 350px;
|
||||
max-width: 100%;
|
||||
height: 100%;
|
||||
top: 0;
|
||||
right: -350px;
|
||||
background-color: var(--color-table-stripe);
|
||||
z-index: 2;
|
||||
|
||||
transform: translateX(0);
|
||||
transition: transform .5s ease;
|
||||
|
||||
|
||||
&.visible {
|
||||
transform: translateX(-100%);
|
||||
|
||||
}
|
||||
|
||||
.content {
|
||||
font-size: 0.875rem;
|
||||
padding: 1rem;
|
||||
margin-top: 5rem;
|
||||
max-width: 400px;
|
||||
color: var(--color-watch-table-row-text);
|
||||
}
|
||||
}
|
||||
|
||||
#heartpath {
|
||||
&:hover {
|
||||
fill: #ff0000 !important;
|
||||
transition: all ease 0.3s !important;
|
||||
}
|
||||
transition: all ease 0.3s !important;
|
||||
}
|
||||
25
changedetectionio/static/styles/scss/parts/_menu.scss
Normal file
@@ -0,0 +1,25 @@
|
||||
.pure-menu-link {
|
||||
padding: 0.5rem 1em;
|
||||
line-height: 1.2rem;
|
||||
}
|
||||
|
||||
.pure-menu-item {
|
||||
svg {
|
||||
height: 1.2rem;
|
||||
}
|
||||
* {
|
||||
vertical-align: middle;
|
||||
}
|
||||
.github-link {
|
||||
height: 1.8rem;
|
||||
display: block;
|
||||
svg {
|
||||
height: 100%;
|
||||
}
|
||||
}
|
||||
.bi-heart {
|
||||
&:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
}
|
||||
}
|
||||
37
changedetectionio/static/styles/scss/parts/_pagination.scss
Normal file
@@ -0,0 +1,37 @@
|
||||
.pagination-page-info {
|
||||
color: #fff;
|
||||
font-size: 0.85rem;
|
||||
text-transform: capitalize;
|
||||
}
|
||||
|
||||
.pagination.menu {
|
||||
> * {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
li {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
a {
|
||||
padding: 0.65rem;
|
||||
margin: 3px;
|
||||
border: none;
|
||||
background: #444;
|
||||
border-radius: 2px;
|
||||
color: var(--color-text-button);
|
||||
&.disabled {
|
||||
display: none;
|
||||
}
|
||||
&.active {
|
||||
font-weight: bold;
|
||||
background: #888;
|
||||
}
|
||||
|
||||
&:hover {
|
||||
background: #999;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
175
changedetectionio/static/styles/scss/parts/_variables.scss
Normal file
@@ -0,0 +1,175 @@
|
||||
/**
|
||||
* CSS custom properties (aka variables).
|
||||
*/
|
||||
|
||||
:root {
|
||||
--color-white: #fff;
|
||||
--color-grey-50: #111;
|
||||
--color-grey-100: #262626;
|
||||
--color-grey-200: #333;
|
||||
--color-grey-300: #444;
|
||||
--color-grey-325: #555;
|
||||
--color-grey-350: #565d64;
|
||||
--color-grey-400: #666;
|
||||
--color-grey-500: #777;
|
||||
--color-grey-600: #999;
|
||||
--color-grey-700: #cbcbcb;
|
||||
--color-grey-750: #ddd;
|
||||
--color-grey-800: #e0e0e0;
|
||||
--color-grey-850: #eee;
|
||||
--color-grey-900: #f2f2f2;
|
||||
--color-black: #000;
|
||||
--color-dark-red: #a00;
|
||||
--color-light-red: #dd0000;
|
||||
|
||||
--color-background-page: var(--color-grey-100);
|
||||
--color-background-gradient-first: #5ad8f7;
|
||||
--color-background-gradient-second: #2f50af;
|
||||
--color-background-gradient-third: #9150bf;
|
||||
--color-background: var(--color-white);
|
||||
--color-text: var(--color-grey-200);
|
||||
--color-link: #1b98f8;
|
||||
--color-menu-accent: #ed5900;
|
||||
--color-background-code: var(--color-grey-850);
|
||||
--color-error: var(--color-dark-red);
|
||||
--color-error-input: #ffebeb;
|
||||
--color-error-list: var(--color-light-red);
|
||||
--color-table-background: var(--color-background);
|
||||
--color-table-stripe: var(--color-grey-900);
|
||||
--color-text-tab: var(--color-white);
|
||||
--color-background-tab: rgba(255, 255, 255, 0.2);
|
||||
--color-background-tab-hover: rgba(255, 255, 255, 0.5);
|
||||
--color-text-tab-active: #222;
|
||||
--color-api-key: #0078e7;
|
||||
|
||||
--color-background-button-primary: #0078e7;
|
||||
--color-background-button-green: #42dd53;
|
||||
--color-background-button-red: #dd4242;
|
||||
--color-background-button-success: rgb(28, 184, 65);
|
||||
--color-background-button-error: rgb(202, 60, 60);
|
||||
--color-text-button-error: var(--color-white);
|
||||
--color-background-button-warning: rgb(202, 60, 60);
|
||||
--color-text-button-warning: var(--color-white);
|
||||
--color-background-button-secondary: rgb(66, 184, 221);
|
||||
--color-background-button-cancel: rgb(200, 200, 200);
|
||||
--color-text-button: var(--color-white);
|
||||
--color-background-button-tag: rgb(99, 99, 99);
|
||||
--color-background-snapshot-age: #dfdfdf;
|
||||
--color-error-text-snapshot-age: var(--color-white);
|
||||
--color-error-background-snapshot-age: #ff0000;
|
||||
--color-background-button-tag-active: #9c9c9c;
|
||||
|
||||
--color-text-messages: var(--color-white);
|
||||
--color-background-messages-message: rgba(255, 255, 255, .2);
|
||||
--color-background-messages-error: rgba(255, 1, 1, .5);
|
||||
--color-background-messages-notice: rgba(255, 255, 255, .5);
|
||||
--color-border-notification: #ccc;
|
||||
|
||||
--color-background-checkbox-operations: rgba(0, 0, 0, 0.05);
|
||||
--color-warning: #ff3300;
|
||||
--color-border-warning: var(--color-warning);
|
||||
--color-text-legend: var(--color-white);
|
||||
|
||||
--color-link-new-version: #e07171;
|
||||
--color-last-checked: #bbb;
|
||||
--color-text-footer: #444;
|
||||
--color-border-watch-table-cell: #eee;
|
||||
|
||||
--color-text-watch-tag-list: #e70069;
|
||||
--color-background-new-watch-form: rgba(0, 0, 0, 0.05);
|
||||
--color-background-new-watch-input: var(--color-white);
|
||||
--color-text-new-watch-input: var(--color-text);
|
||||
|
||||
--color-border-input: var(--color-grey-500);
|
||||
--color-shadow-input: var(--color-grey-400);
|
||||
--color-background-input: var(--color-white);
|
||||
--color-text-input: var(--color-text);
|
||||
--color-text-input-description: var(--color-grey-500);
|
||||
--color-text-input-placeholder: var(--color-grey-600);
|
||||
|
||||
--color-background-table-thead: var(--color-grey-800);
|
||||
--color-border-table-cell: var(--color-grey-700);
|
||||
|
||||
--color-text-menu-heading: var(--color-grey-350);
|
||||
--color-text-menu-link: var(--color-grey-500);
|
||||
--color-background-menu-link-hover: var(--color-grey-850);
|
||||
--color-text-menu-link-hover: var(--color-grey-300);
|
||||
|
||||
--color-shadow-jump: var(--color-grey-500);
|
||||
--color-icon-github: var(--color-black);
|
||||
--color-icon-github-hover: var(--color-grey-300);
|
||||
|
||||
--color-watch-table-error: var(--color-dark-red);
|
||||
--color-watch-table-row-text: var(--color-grey-100);
|
||||
}
|
||||
|
||||
html[data-darkmode="true"] {
|
||||
--color-link: #59bdfb;
|
||||
--color-text: var(--color-white);
|
||||
|
||||
--color-background-gradient-first: #3f90a5;
|
||||
--color-background-gradient-second: #1e316c;
|
||||
--color-background-gradient-third: #4d2c64;
|
||||
|
||||
--color-background-new-watch-input: var(--color-grey-100);
|
||||
--color-text-new-watch-input: var(--color-text);
|
||||
--color-background-table-thead: var(--color-grey-200);
|
||||
--color-table-background: var(--color-grey-300);
|
||||
--color-table-stripe: var(--color-grey-325);
|
||||
--color-background: var(--color-grey-300);
|
||||
--color-text-menu-heading: var(--color-grey-850);
|
||||
--color-text-menu-link: var(--color-grey-800);
|
||||
--color-border-table-cell: var(--color-grey-400);
|
||||
--color-text-tab-active: var(--color-text);
|
||||
|
||||
--color-border-input: var(--color-grey-400);
|
||||
--color-shadow-input: var(--color-grey-50);
|
||||
--color-background-input: var(--color-grey-350);
|
||||
--color-text-input-description: var(--color-grey-600);
|
||||
--color-text-input-placeholder: var(--color-grey-600);
|
||||
--color-text-watch-tag-list: #fa3e92;
|
||||
--color-background-code: var(--color-grey-200);
|
||||
|
||||
--color-background-tab: rgba(0, 0, 0, 0.2);
|
||||
--color-background-tab-hover: rgba(0, 0, 0, 0.5);
|
||||
|
||||
--color-background-snapshot-age: var(--color-grey-200);
|
||||
--color-shadow-jump: var(--color-grey-200);
|
||||
--color-icon-github: var(--color-white);
|
||||
--color-icon-github-hover: var(--color-grey-700);
|
||||
--color-watch-table-error: var(--color-light-red);
|
||||
--color-watch-table-row-text: var(--color-grey-800);
|
||||
|
||||
|
||||
.icon-spread {
|
||||
filter: hue-rotate(-10deg) brightness(1.5);
|
||||
}
|
||||
|
||||
.watch-table {
|
||||
|
||||
.title-col a[target="_blank"]::after,
|
||||
.current-diff-url::after {
|
||||
filter: invert(.5) hue-rotate(10deg) brightness(2);
|
||||
}
|
||||
|
||||
.watch-controls {
|
||||
.state-off {
|
||||
img {
|
||||
opacity: 0.3;
|
||||
}
|
||||
}
|
||||
.state-on {
|
||||
img {
|
||||
opacity: 1.0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.unviewed {
|
||||
color: #fff;
|
||||
&.error {
|
||||
color: var(--color-watch-table-error);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
|
||||
#selector-wrapper {
|
||||
height: 100%;
|
||||
max-height: 70vh;
|
||||
overflow-y: scroll;
|
||||
position: relative;
|
||||
|
||||
//width: 100%;
|
||||
>img {
|
||||
position: absolute;
|
||||
z-index: 4;
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
>canvas {
|
||||
position: relative;
|
||||
z-index: 5;
|
||||
max-width: 100%;
|
||||
|
||||
&:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#selector-current-xpath {
|
||||
font-size: 80%;
|
||||
}
|
||||
@@ -1,42 +1,125 @@
|
||||
/*
|
||||
* -- BASE STYLES --
|
||||
* Most of these are inherited from Base, but I want to change a few.
|
||||
nvm use v14.18.1 && npm install && npm run build
|
||||
* or npm run watch
|
||||
*/
|
||||
|
||||
@import "parts/spinners";
|
||||
@import "parts/browser-steps";
|
||||
@import "parts/_arrows.scss";
|
||||
@import "parts/_arrows";
|
||||
@import "parts/_browser-steps";
|
||||
@import "parts/_extra_proxies";
|
||||
@import "parts/_pagination";
|
||||
@import "parts/_spinners";
|
||||
@import "parts/_variables";
|
||||
@import "parts/_darkmode";
|
||||
@import "parts/_menu";
|
||||
@import "parts/_love";
|
||||
|
||||
body {
|
||||
color: #333;
|
||||
background: #262626;
|
||||
color: var(--color-text);
|
||||
background: var(--color-background-page);
|
||||
}
|
||||
|
||||
.visually-hidden {
|
||||
clip: rect(0 0 0 0);
|
||||
clip-path: inset(50%);
|
||||
height: 1px;
|
||||
overflow: hidden;
|
||||
position: absolute;
|
||||
white-space: nowrap;
|
||||
width: 1px;
|
||||
}
|
||||
|
||||
// Row icons like chrome, pdf, share, etc
|
||||
.status-icon {
|
||||
display: inline-block;
|
||||
height: 1rem;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
.pure-table-even {
|
||||
background: #fff;
|
||||
background: var(--color-background);
|
||||
}
|
||||
|
||||
/* Some styles from https://css-tricks.com/ */
|
||||
a {
|
||||
text-decoration: none;
|
||||
color: #1b98f8;
|
||||
color: var(--color-link);
|
||||
}
|
||||
|
||||
a.github-link {
|
||||
color: var(--color-icon-github);
|
||||
margin: 0 1rem 0 0.5rem;
|
||||
|
||||
svg {
|
||||
fill: currentColor;
|
||||
}
|
||||
|
||||
&:hover {
|
||||
color: var(--color-icon-github-hover);
|
||||
}
|
||||
}
|
||||
|
||||
#search-q {
|
||||
opacity: 0;
|
||||
-webkit-transition: all .9s ease;
|
||||
-moz-transition: all .9s ease;
|
||||
transition: all .9s ease;
|
||||
width: 0;
|
||||
display: none;
|
||||
&.expanded {
|
||||
width: auto;
|
||||
display: inline-block;
|
||||
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
#search-result-info {
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
button.toggle-button {
|
||||
vertical-align: middle;
|
||||
background: transparent;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
|
||||
color: var(--color-icon-github);
|
||||
|
||||
&:hover {
|
||||
color: var(--color-icon-github-hover);
|
||||
}
|
||||
|
||||
svg {
|
||||
fill: currentColor;
|
||||
}
|
||||
|
||||
.icon-light {
|
||||
display: block;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
.pure-menu-horizontal {
|
||||
background: #fff;
|
||||
background: var(--color-background);
|
||||
padding: 5px;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
border-bottom: 2px solid #ed5900;
|
||||
border-bottom: 2px solid var(--color-menu-accent);
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.pure-menu-heading {
|
||||
color: var(--color-text-menu-heading);
|
||||
}
|
||||
|
||||
.pure-menu-link {
|
||||
color: var(--color-text-menu-link);
|
||||
|
||||
&:hover {
|
||||
background-color: var(--color-background-menu-link-hover);
|
||||
color: var(--color-text-menu-link-hover);
|
||||
}
|
||||
}
|
||||
|
||||
section.content {
|
||||
padding-top: 5em;
|
||||
padding-bottom: 1em;
|
||||
@@ -47,7 +130,8 @@ section.content {
|
||||
}
|
||||
|
||||
code {
|
||||
background: #eee;
|
||||
background: var(--color-background-code);
|
||||
color: var(--color-text);
|
||||
}
|
||||
|
||||
/* table related */
|
||||
@@ -55,30 +139,36 @@ code {
|
||||
width: 100%;
|
||||
font-size: 80%;
|
||||
|
||||
tr.unviewed {
|
||||
font-weight: bold;
|
||||
tr {
|
||||
&.unviewed {
|
||||
font-weight: bold;
|
||||
}
|
||||
&.error {
|
||||
color: var(--color-watch-table-error);
|
||||
}
|
||||
color: var(--color-watch-table-row-text);
|
||||
}
|
||||
|
||||
.error {
|
||||
color: #a00;
|
||||
}
|
||||
|
||||
td {
|
||||
white-space: nowrap;
|
||||
&.title-col {
|
||||
word-break: break-all;
|
||||
white-space: normal;
|
||||
}
|
||||
}
|
||||
|
||||
td.title-col {
|
||||
word-break: break-all;
|
||||
white-space: normal;
|
||||
}
|
||||
|
||||
th {
|
||||
white-space: nowrap;
|
||||
|
||||
a {
|
||||
font-weight: normal;
|
||||
|
||||
&.active {
|
||||
font-weight: bolder;
|
||||
}
|
||||
|
||||
&.inactive {
|
||||
.arrow {
|
||||
display: none;
|
||||
@@ -87,14 +177,15 @@ code {
|
||||
}
|
||||
}
|
||||
|
||||
.title-col a[target="_blank"]::after, .current-diff-url::after {
|
||||
.title-col a[target="_blank"]::after,
|
||||
.current-diff-url::after {
|
||||
content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
|
||||
margin: 0 3px 0 5px;
|
||||
}
|
||||
}
|
||||
|
||||
.watch-tag-list {
|
||||
color: #e70069;
|
||||
color: var(--color-text-watch-tag-list);
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
@@ -126,10 +217,11 @@ code {
|
||||
|
||||
body:after {
|
||||
content: "";
|
||||
background: linear-gradient(130deg, #5ad8f7, #2f50af 41.07%, #9150bf 84.05%);
|
||||
background: linear-gradient(130deg, var(--color-background-gradient-first), var(--color-background-gradient-second) 41.07%, var(--color-background-gradient-third) 84.05%);
|
||||
}
|
||||
|
||||
body:after, body:before {
|
||||
body:after,
|
||||
body:before {
|
||||
display: block;
|
||||
height: 650px;
|
||||
position: absolute;
|
||||
@@ -149,7 +241,8 @@ body::before {
|
||||
background-size: cover
|
||||
}
|
||||
|
||||
body:after, body:before {
|
||||
body:after,
|
||||
body:before {
|
||||
-webkit-clip-path: polygon(100% 0, 0 0, 0 77.5%, 1% 77.4%, 2% 77.1%, 3% 76.6%, 4% 75.9%, 5% 75.05%, 6% 74.05%, 7% 72.95%, 8% 71.75%, 9% 70.55%, 10% 69.3%, 11% 68.05%, 12% 66.9%, 13% 65.8%, 14% 64.8%, 15% 64%, 16% 63.35%, 17% 62.85%, 18% 62.6%, 19% 62.5%, 20% 62.65%, 21% 63%, 22% 63.5%, 23% 64.2%, 24% 65.1%, 25% 66.1%, 26% 67.2%, 27% 68.4%, 28% 69.65%, 29% 70.9%, 30% 72.15%, 31% 73.3%, 32% 74.35%, 33% 75.3%, 34% 76.1%, 35% 76.75%, 36% 77.2%, 37% 77.45%, 38% 77.5%, 39% 77.3%, 40% 76.95%, 41% 76.4%, 42% 75.65%, 43% 74.75%, 44% 73.75%, 45% 72.6%, 46% 71.4%, 47% 70.15%, 48% 68.9%, 49% 67.7%, 50% 66.55%, 51% 65.5%, 52% 64.55%, 53% 63.75%, 54% 63.15%, 55% 62.75%, 56% 62.55%, 57% 62.5%, 58% 62.7%, 59% 63.1%, 60% 63.7%, 61% 64.45%, 62% 65.4%, 63% 66.45%, 64% 67.6%, 65% 68.8%, 66% 70.05%, 67% 71.3%, 68% 72.5%, 69% 73.6%, 70% 74.65%, 71% 75.55%, 72% 76.35%, 73% 76.9%, 74% 77.3%, 75% 77.5%, 76% 77.45%, 77% 77.25%, 78% 76.8%, 79% 76.2%, 80% 75.4%, 81% 74.45%, 82% 73.4%, 83% 72.25%, 84% 71.05%, 85% 69.8%, 86% 68.55%, 87% 67.35%, 88% 66.2%, 89% 65.2%, 90% 64.3%, 91% 63.55%, 92% 63%, 93% 62.65%, 94% 62.5%, 95% 62.55%, 96% 62.8%, 97% 63.3%, 98% 63.9%, 99% 64.75%, 100% 65.7%);
|
||||
clip-path: polygon(100% 0, 0 0, 0 77.5%, 1% 77.4%, 2% 77.1%, 3% 76.6%, 4% 75.9%, 5% 75.05%, 6% 74.05%, 7% 72.95%, 8% 71.75%, 9% 70.55%, 10% 69.3%, 11% 68.05%, 12% 66.9%, 13% 65.8%, 14% 64.8%, 15% 64%, 16% 63.35%, 17% 62.85%, 18% 62.6%, 19% 62.5%, 20% 62.65%, 21% 63%, 22% 63.5%, 23% 64.2%, 24% 65.1%, 25% 66.1%, 26% 67.2%, 27% 68.4%, 28% 69.65%, 29% 70.9%, 30% 72.15%, 31% 73.3%, 32% 74.35%, 33% 75.3%, 34% 76.1%, 35% 76.75%, 36% 77.2%, 37% 77.45%, 38% 77.5%, 39% 77.3%, 40% 76.95%, 41% 76.4%, 42% 75.65%, 43% 74.75%, 44% 73.75%, 45% 72.6%, 46% 71.4%, 47% 70.15%, 48% 68.9%, 49% 67.7%, 50% 66.55%, 51% 65.5%, 52% 64.55%, 53% 63.75%, 54% 63.15%, 55% 62.75%, 56% 62.55%, 57% 62.5%, 58% 62.7%, 59% 63.1%, 60% 63.7%, 61% 64.45%, 62% 65.4%, 63% 66.45%, 64% 67.6%, 65% 68.8%, 66% 70.05%, 67% 71.3%, 68% 72.5%, 69% 73.6%, 70% 74.65%, 71% 75.55%, 72% 76.35%, 73% 76.9%, 74% 77.3%, 75% 77.5%, 76% 77.45%, 77% 77.25%, 78% 76.8%, 79% 76.2%, 80% 75.4%, 81% 74.45%, 82% 73.4%, 83% 72.25%, 84% 71.05%, 85% 69.8%, 86% 68.55%, 87% 67.35%, 88% 66.2%, 89% 65.2%, 90% 64.3%, 91% 63.55%, 92% 63%, 93% 62.65%, 94% 62.5%, 95% 62.55%, 96% 62.8%, 97% 63.3%, 98% 63.9%, 99% 64.75%, 100% 65.7%)
|
||||
}
|
||||
@@ -158,6 +251,10 @@ body:after, body:before {
|
||||
font-size: 85%;
|
||||
}
|
||||
|
||||
.button-xsmall {
|
||||
font-size: 70%;
|
||||
}
|
||||
|
||||
.fetch-error {
|
||||
padding-top: 1em;
|
||||
font-size: 80%;
|
||||
@@ -165,51 +262,57 @@ body:after, body:before {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.pure-button-primary,
|
||||
a.pure-button-primary,
|
||||
.pure-button-selected,
|
||||
a.pure-button-selected {
|
||||
background-color: var(--color-background-button-primary);
|
||||
}
|
||||
|
||||
.button-secondary {
|
||||
color: white;
|
||||
color: var(--color-text-button);
|
||||
border-radius: 4px;
|
||||
text-shadow: 0 1px 1px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
.button-success {
|
||||
background: rgb(28, 184, 65);
|
||||
/* this is a green */
|
||||
background: var(--color-background-button-success);
|
||||
}
|
||||
|
||||
.button-tag {
|
||||
background: rgb(99, 99, 99);
|
||||
color: #fff;
|
||||
background: var(--color-background-button-tag);
|
||||
color: var(--color-text-button);
|
||||
font-size: 65%;
|
||||
border-bottom-left-radius: initial;
|
||||
border-bottom-right-radius: initial;
|
||||
|
||||
&.active {
|
||||
background: #9c9c9c;
|
||||
background: var(--color-background-button-tag-active);
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
.button-error {
|
||||
background: rgb(202, 60, 60);
|
||||
/* this is a maroon */
|
||||
background: var(--color-background-button-error);
|
||||
color: var(--color-text-button-error);
|
||||
}
|
||||
|
||||
.button-warning {
|
||||
background: rgb(223, 117, 20);
|
||||
/* this is an orange */
|
||||
background: var(--color-background-button-warning);
|
||||
color: var(--color-text-button-warning);
|
||||
}
|
||||
|
||||
.button-secondary {
|
||||
background: rgb(66, 184, 221);
|
||||
/* this is a light blue */
|
||||
background: var(--color-background-button-secondary);
|
||||
}
|
||||
|
||||
|
||||
.button-cancel {
|
||||
background: rgb(200, 200, 200);
|
||||
/* this is a green */
|
||||
background: var(--color-background-button-cancel);
|
||||
}
|
||||
|
||||
#save_button {
|
||||
margin-right: 1rem;
|
||||
}
|
||||
|
||||
.messages {
|
||||
@@ -217,50 +320,62 @@ body:after, body:before {
|
||||
list-style: none;
|
||||
padding: 1em;
|
||||
border-radius: 10px;
|
||||
color: #fff;
|
||||
color: var(--color-text-messages);
|
||||
font-weight: bold;
|
||||
|
||||
&.message {
|
||||
background: rgba(255, 255, 255, .2);
|
||||
background: var(--color-background-messages-message);
|
||||
}
|
||||
|
||||
&.error {
|
||||
background: rgba(255, 1, 1, .5);
|
||||
background: var(--color-background-messages-error);
|
||||
}
|
||||
|
||||
&.notice {
|
||||
background: rgba(255, 255, 255, .5);
|
||||
background: var(--color-background-messages-notice);
|
||||
}
|
||||
}
|
||||
|
||||
&.with-share-link {
|
||||
> *:hover {
|
||||
>*:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.notifications-wrapper {
|
||||
padding: 0.5rem 0 1rem 0;
|
||||
}
|
||||
|
||||
label {
|
||||
&:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
}
|
||||
|
||||
#notification-customisation {
|
||||
border: 1px solid #ccc;
|
||||
border: 1px solid var(--color-border-notification);
|
||||
padding: 0.5rem;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
#notification-error-log {
|
||||
border: 1px solid #ccc;
|
||||
border: 1px solid var(--color-border-notification);
|
||||
padding: 1rem;
|
||||
border-radius: 5px;
|
||||
overflow-wrap: break-word;
|
||||
}
|
||||
|
||||
#token-table {
|
||||
&.pure-table td, &.pure-table th {
|
||||
|
||||
&.pure-table td,
|
||||
&.pure-table th {
|
||||
font-size: 80%;
|
||||
}
|
||||
}
|
||||
|
||||
#new-watch-form {
|
||||
background: rgba(0, 0, 0, .05);
|
||||
background: var(--color-background-new-watch-form);
|
||||
padding: 1em;
|
||||
border-radius: 10px;
|
||||
margin-bottom: 1em;
|
||||
@@ -270,19 +385,25 @@ body:after, body:before {
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
input:not(.pure-button) {
|
||||
background-color: var(--color-background-new-watch-input);
|
||||
color: var(--color-text-new-watch-input);
|
||||
}
|
||||
|
||||
.label {
|
||||
display: none;
|
||||
}
|
||||
|
||||
legend {
|
||||
color: #fff;
|
||||
color: var(--color-text-legend);
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
#watch-add-wrapper-zone {
|
||||
> div {
|
||||
>div {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
@media only screen and (max-width: 760px) {
|
||||
#url {
|
||||
width: 100%;
|
||||
@@ -300,15 +421,15 @@ body:after, body:before {
|
||||
position: fixed;
|
||||
left: 0px;
|
||||
top: 120px;
|
||||
background: #fff;
|
||||
background: var(--color-background);
|
||||
padding: 10px;
|
||||
border-top-right-radius: 5px;
|
||||
border-bottom-right-radius: 5px;
|
||||
box-shadow: 5px 0 5px -2px #888;
|
||||
box-shadow: 1px 1px 4px var(--color-shadow-jump);
|
||||
|
||||
a {
|
||||
color: #1b98f8;
|
||||
cursor: grabbing;
|
||||
color: var(--color-link);
|
||||
cursor: pointer;
|
||||
-moz-user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-ms-user-select: none;
|
||||
@@ -319,8 +440,8 @@ body:after, body:before {
|
||||
|
||||
footer {
|
||||
padding: 10px;
|
||||
background: #fff;
|
||||
color: #444;
|
||||
background: var(--color-background);
|
||||
color: var(--color-text-footer);
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
@@ -343,11 +464,15 @@ footer {
|
||||
position: absolute;
|
||||
top: 60px;
|
||||
font-size: 65%;
|
||||
background: #fff;
|
||||
background: var(--color-background);
|
||||
padding: 10px;
|
||||
|
||||
&#left-sticky {
|
||||
left: 0px;
|
||||
left: 0;
|
||||
position: fixed;
|
||||
border-top-right-radius: 5px;
|
||||
border-bottom-right-radius: 5px;
|
||||
box-shadow: 1px 1px 4px var(--color-shadow-jump);
|
||||
}
|
||||
|
||||
&#right-sticky {
|
||||
@@ -362,10 +487,12 @@ footer {
|
||||
}
|
||||
|
||||
#new-version-text a {
|
||||
color: #e07171;
|
||||
color: var(--color-link-new-version);
|
||||
}
|
||||
|
||||
.watch-controls {
|
||||
color: #f8321b;
|
||||
|
||||
.state-on {
|
||||
img {
|
||||
opacity: 0.8;
|
||||
@@ -383,7 +510,6 @@ footer {
|
||||
opacity: 0.8;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
.monospaced-textarea {
|
||||
@@ -392,7 +518,8 @@ footer {
|
||||
font-family: monospace;
|
||||
white-space: pre;
|
||||
overflow-wrap: normal;
|
||||
overflow-x: scroll;
|
||||
// No scrollbars until needed.
|
||||
overflow-x: auto;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -407,7 +534,9 @@ footer {
|
||||
}
|
||||
}
|
||||
|
||||
.pure-control-group, .pure-group, .pure-controls {
|
||||
.pure-control-group,
|
||||
.pure-group,
|
||||
.pure-controls {
|
||||
padding-bottom: 1em;
|
||||
|
||||
div {
|
||||
@@ -415,28 +544,32 @@ footer {
|
||||
}
|
||||
|
||||
.checkbox {
|
||||
> * {
|
||||
>* {
|
||||
display: inline;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
> label {
|
||||
>label {
|
||||
padding-left: 5px;
|
||||
}
|
||||
}
|
||||
|
||||
legend {
|
||||
color: var(--color-text-legend);
|
||||
}
|
||||
}
|
||||
|
||||
/* The input fields with errors */
|
||||
.error {
|
||||
input {
|
||||
background-color: #ffebeb;
|
||||
background-color: var(--color-error-input);
|
||||
}
|
||||
}
|
||||
|
||||
/* The list of errors */
|
||||
ul.errors {
|
||||
padding: .5em .6em;
|
||||
border: 1px solid #dd0000;
|
||||
border: 1px solid var(--color-error-list);
|
||||
border-radius: 4px;
|
||||
vertical-align: middle;
|
||||
-webkit-box-sizing: border-box;
|
||||
@@ -444,7 +577,7 @@ footer {
|
||||
|
||||
li {
|
||||
margin-left: 1em;
|
||||
color: #dd0000;
|
||||
color: var(--color-error-list);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -462,7 +595,7 @@ footer {
|
||||
list-style: none;
|
||||
|
||||
li {
|
||||
> * {
|
||||
>* {
|
||||
display: inline-block;
|
||||
}
|
||||
}
|
||||
@@ -471,21 +604,25 @@ footer {
|
||||
}
|
||||
|
||||
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 1024px) {
|
||||
@media only screen and (max-width: 760px),
|
||||
(min-device-width: 768px) and (max-device-width: 1024px) {
|
||||
.box {
|
||||
max-width: 95%
|
||||
}
|
||||
|
||||
.edit-form {
|
||||
padding: 0.5em;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
#nav-menu {
|
||||
overflow-x: scroll;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 800px) {
|
||||
@media only screen and (max-width: 760px),
|
||||
(min-device-width: 768px) and (max-device-width: 800px) {
|
||||
|
||||
div.sticky-tab#hosted-sticky {
|
||||
top: 60px;
|
||||
@@ -514,24 +651,29 @@ footer {
|
||||
and also iPads specifically.
|
||||
*/
|
||||
.watch-table {
|
||||
|
||||
/* Force table to not be like tables anymore */
|
||||
thead, tbody, th, td, tr {
|
||||
thead,
|
||||
tbody,
|
||||
th,
|
||||
td,
|
||||
tr {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.last-checked {
|
||||
> span {
|
||||
>span {
|
||||
vertical-align: middle;
|
||||
}
|
||||
}
|
||||
|
||||
.last-checked::before {
|
||||
color: #555;
|
||||
color: var(--color-last-checked);
|
||||
content: "Last Checked ";
|
||||
}
|
||||
|
||||
.last-changed::before {
|
||||
color: #555;
|
||||
color: var(--color-last-checked);
|
||||
content: "Last Changed ";
|
||||
}
|
||||
|
||||
@@ -547,15 +689,17 @@ footer {
|
||||
left: -9999px;
|
||||
}
|
||||
|
||||
.pure-table td, .pure-table th {
|
||||
.pure-table td,
|
||||
.pure-table th {
|
||||
border: none;
|
||||
}
|
||||
|
||||
td {
|
||||
/* Behave like a "row" */
|
||||
border: none;
|
||||
border-bottom: 1px solid #eee;
|
||||
border-bottom: 1px solid var(--color-border-watch-table-cell);
|
||||
vertical-align: middle;
|
||||
|
||||
&:before {
|
||||
/* Top/left values mimic padding */
|
||||
top: 6px;
|
||||
@@ -568,11 +712,11 @@ footer {
|
||||
|
||||
&.pure-table-striped {
|
||||
tr {
|
||||
background-color: #fff;
|
||||
background-color: var(--color-table-background);
|
||||
}
|
||||
|
||||
tr:nth-child(2n-1) {
|
||||
background-color: #eee;
|
||||
background-color: var(--color-table-stripe);
|
||||
}
|
||||
|
||||
tr:nth-child(2n-1) td {
|
||||
@@ -583,12 +727,66 @@ footer {
|
||||
}
|
||||
}
|
||||
|
||||
.pure-table {
|
||||
border-color: var(--color-border-table-cell);
|
||||
|
||||
thead {
|
||||
background-color: var(--color-background-table-thead);
|
||||
color: var(--color-text);
|
||||
}
|
||||
|
||||
td,
|
||||
th {
|
||||
border-left-color: var(--color-border-table-cell);
|
||||
}
|
||||
}
|
||||
|
||||
.pure-table-striped {
|
||||
tr:nth-child(2n-1) {
|
||||
td {
|
||||
background-color: var(--color-table-stripe);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.pure-form input[type=color],
|
||||
.pure-form input[type=date],
|
||||
.pure-form input[type=datetime-local],
|
||||
.pure-form input[type=datetime],
|
||||
.pure-form input[type=email],
|
||||
.pure-form input[type=month],
|
||||
.pure-form input[type=number],
|
||||
.pure-form input[type=password],
|
||||
.pure-form input[type=search],
|
||||
.pure-form input[type=tel],
|
||||
.pure-form input[type=text],
|
||||
.pure-form input[type=time],
|
||||
.pure-form input[type=url],
|
||||
.pure-form input[type=week],
|
||||
.pure-form select,
|
||||
.pure-form textarea {
|
||||
border: var(--color-border-input);
|
||||
box-shadow: inset 0 1px 3px var(--color-shadow-input);
|
||||
background-color: var(--color-background-input);
|
||||
color: var(--color-text-input);
|
||||
|
||||
&:active {
|
||||
background-color: var(--color-background-input);
|
||||
}
|
||||
}
|
||||
|
||||
input::placeholder,
|
||||
textarea::placeholder {
|
||||
color: var(--color-text-input-placeholder);
|
||||
}
|
||||
|
||||
|
||||
/** Desktop vs mobile input field strategy
|
||||
- We don't use 'size' with <input> because `size` is too unreliable to override, and will often push the layout out
- Always rely on width in CSS
|
||||
*/
|
||||
@media only screen and (min-width: 761px) {
|
||||
|
||||
/* m-d is medium-desktop */
|
||||
.m-d {
|
||||
min-width: 80%;
|
||||
@@ -605,16 +803,23 @@ footer {
|
||||
li {
|
||||
margin-right: 3px;
|
||||
display: inline-block;
|
||||
color: #fff;
|
||||
color: var(--color-text-tab);
|
||||
border-top-left-radius: 5px;
|
||||
border-top-right-radius: 5px;
|
||||
background-color: rgba(255, 255, 255, 0.2);
|
||||
background-color: var(--color-background-tab);
|
||||
|
||||
&.active, :target {
|
||||
background-color: #fff;
|
||||
&:not(.active) {
|
||||
&:hover {
|
||||
background-color: var(--color-background-tab-hover);
|
||||
}
|
||||
}
|
||||
|
||||
&.active,
|
||||
:target {
|
||||
background-color: var(--color-background);
|
||||
|
||||
a {
|
||||
color: #222;
|
||||
color: var(--color-text-tab-active);
|
||||
font-weight: bold;
|
||||
}
|
||||
}
|
||||
@@ -622,22 +827,24 @@ footer {
|
||||
a {
|
||||
display: block;
|
||||
padding: 0.8em;
|
||||
color: #fff;
|
||||
color: var(--color-text-tab);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
$form-edge-padding: 20px;
|
||||
|
||||
.pure-form-stacked {
|
||||
> div:first-child {
|
||||
>div:first-child {
|
||||
display: block;
|
||||
}
|
||||
}
|
||||
|
||||
.login-form {
|
||||
.inner {
|
||||
background: #fff;;
|
||||
background: var(--color-background);
|
||||
;
|
||||
padding: $form-edge-padding;
|
||||
border-radius: 5px;
|
||||
}
|
||||
@@ -667,11 +874,13 @@ $form-edge-padding: 20px;
|
||||
#selector-header {
|
||||
padding-bottom: 1em;
|
||||
}
|
||||
|
||||
body.full-width {
|
||||
.edit-form {
|
||||
width: 95%;
|
||||
}
|
||||
}
|
||||
|
||||
.edit-form {
|
||||
min-width: 70%;
|
||||
/* so it cant overflow */
|
||||
@@ -682,17 +891,36 @@ body.full-width {
|
||||
}
|
||||
|
||||
.inner {
|
||||
background: #fff;;
|
||||
background: var(--color-background);
|
||||
padding: $form-edge-padding;
|
||||
}
|
||||
|
||||
#actions {
|
||||
display: block;
|
||||
background: #fff;
|
||||
background: var(--color-background);
|
||||
}
|
||||
|
||||
.pure-form-message-inline {
|
||||
padding-left: 0;
|
||||
color: var(--color-text-input-description);
|
||||
code {
|
||||
font-size: .875em;
|
||||
}
|
||||
}
|
||||
.text-filtering {
|
||||
h3 {
|
||||
margin-top: 0;
|
||||
}
|
||||
border: 1px solid #ccc;
|
||||
padding: 1rem;
|
||||
border-radius: 5px;
|
||||
margin-bottom: 1rem;
|
||||
fieldset:last-of-type {
|
||||
padding-bottom: 0;
|
||||
.pure-control-group {
|
||||
padding-bottom: 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -712,31 +940,7 @@ ul {
|
||||
}
|
||||
}
|
||||
|
||||
#selector-wrapper {
|
||||
height: 100%;
|
||||
overflow-y: scroll;
|
||||
position: relative;
|
||||
//width: 100%;
|
||||
> img {
|
||||
position: absolute;
|
||||
z-index: 4;
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
> canvas {
|
||||
position: relative;
|
||||
z-index: 5;
|
||||
max-width: 100%;
|
||||
|
||||
&:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#selector-current-xpath {
|
||||
font-size: 80%;
|
||||
}
|
||||
@import "parts/_visualselector";
|
||||
|
||||
#webdriver-override-options {
input[type="number"] {
@@ -751,54 +955,61 @@ ul {
}

#api-key-copy {
color: #0078e7;
color: var(--color-api-key);
}

.button-green {
background-color: #42dd53;
background-color: var(--color-background-button-green);
}

.button-red {
background-color: #dd4242;
background-color: var(--color-background-button-red);
}

.noselect {
-webkit-touch-callout: none; /* iOS Safari */
-webkit-user-select: none; /* Safari */
-moz-user-select: none; /* Old versions of Firefox */
-ms-user-select: none; /* Internet Explorer/Edge */
-webkit-touch-callout: none;
/* iOS Safari */
-webkit-user-select: none;
/* Safari */
-moz-user-select: none;
/* Old versions of Firefox */
-ms-user-select: none;
/* Internet Explorer/Edge */
user-select: none;
/* Non-prefixed version, currently
supported by Chrome, Edge, Opera and Firefox */
supported by Chrome, Edge, Opera and Firefox */
}

.snapshot-age {
padding: 4px;
background-color: #dfdfdf;
margin: 0.5rem 0;
background-color: var(--color-background-snapshot-age);
border-radius: 3px;
font-weight: bold;
margin-bottom: 4px;

&.error {
background-color: #ff0000;
color: #fff;
background-color: var(--color-error-background-snapshot-age);
color: var(--color-error-text-snapshot-age);
}
}

#checkbox-operations {
background: rgba(0, 0, 0, 0.05);
background: var(--color-background-checkbox-operations);
padding: 1em;
border-radius: 10px;
margin-bottom: 1em;
display: none;
}

.checkbox-uuid {
> * {
>* {
vertical-align: middle;
}
}

.inline-warning {
> span {
>span {
display: inline-block;
vertical-align: middle;
}
@@ -809,8 +1020,63 @@ ul {
vertical-align: middle;
}

border: 1px solid #ff3300;
border: 1px solid var(--color-border-warning);
padding: 0.5rem;
border-radius: 5px;
color: #ff3300;
color: var(--color-warning);
}

/* automatic price following helpers */
.tracking-ldjson-price-data {
background-color: var(--color-background-button-green);
color: #000;
padding: 3px;
border-radius: 3px;
white-space: nowrap;
}

.ldjson-price-track-offer {
a.pure-button {
border-radius: 3px;
padding: 3px;
background-color: var(--color-background-button-green);
}

font-weight: bold;
font-style: italic;
}

.price-follow-tag-icon {
display: inline-block;
height: 0.8rem;
vertical-align: middle;
}


#quick-watch-processor-type {
color: #fff;
ul {
padding: 0.3rem;

li {
list-style: none;
font-size: 0.8rem;
}
}

}

.restock-label {
&.in-stock {
background-color: var(--color-background-button-green);
color: #fff;
}
&.not-in-stock {
background-color: var(--color-background-button-cancel);
color: #777;
}
padding: 3px;
border-radius: 3px;
white-space: nowrap;
}
@@ -1,20 +1,27 @@
from distutils.util import strtobool

from flask import (
flash
)

from . model import App, Watch
from copy import deepcopy, copy
from os import path, unlink
from threading import Lock
import json
import logging
import os
import threading
import time
import uuid as uuid_builder
from copy import deepcopy
from os import path, unlink
from threading import Lock
import re
import requests
import secrets
import threading
import time
import uuid as uuid_builder

from . model import App, Watch
# Because the server will run as a daemon and wont know the URL for notification links when firing off a notification
BASE_URL_NOT_SET_TEXT = '("Base URL" not set - see settings - notifications)'

dictfilt = lambda x, y: dict([ (i,x[i]) for i in x if i in set(y) ])

# Is there an existing library to ensure some data store (JSON etc) is in sync with CRUD methods?
# Open a github issue if you know something :)
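The dictfilt helper simply keeps the keys of a dict that appear in a given whitelist of keys. A minimal sketch of how it behaves (the sample data is invented for illustration):

    dictfilt = lambda x, y: dict([(i, x[i]) for i in x if i in set(y)])

    # Hypothetical tag store keyed by UUID (illustration only)
    all_tags = {
        'uuid-1': {'title': 'tech'},
        'uuid-2': {'title': 'news'},
        'uuid-3': {'title': 'python'},
    }

    # Keep only the entries a particular watch references
    print(dictfilt(all_tags, ['uuid-1', 'uuid-3']))
    # -> {'uuid-1': {'title': 'tech'}, 'uuid-3': {'title': 'python'}}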
@@ -35,8 +42,8 @@ class ChangeDetectionStore:
self.__data = App.model()
self.datastore_path = datastore_path
self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
print(">>> Datastore path is ", self.json_store_path)
self.needs_write = False
self.proxy_list = None
self.start_time = time.time()
self.stop_thread = False
# Base definition for all watchers
@@ -78,10 +85,10 @@ class ChangeDetectionStore:
self.__data['watching'][uuid] = Watch.model(datastore_path=self.datastore_path, default=watch)
print("Watching:", uuid, self.__data['watching'][uuid]['url'])

# First time ran, doesnt exist.
except (FileNotFoundError, json.decoder.JSONDecodeError):
# First time ran, Create the datastore.
except (FileNotFoundError):
if include_default_watches:
print("Creating JSON store at", self.datastore_path)
print("No JSON DB found at {}, creating JSON store at {}".format(self.json_store_path, self.datastore_path))
self.add_watch(url='https://news.ycombinator.com/',
tag='Tech news',
extras={'fetch_backend': 'html_requests'})
@@ -90,8 +97,18 @@ class ChangeDetectionStore:
tag='changedetection.io',
extras={'fetch_backend': 'html_requests'})

updates_available = self.get_updates_available()
self.__data['settings']['application']['schema_version'] = updates_available.pop()

else:
# Bump the update version by running updates
self.run_updates()

self.__data['version_tag'] = version_tag

# Just to test that proxies.json if it exists, doesnt throw a parsing error on startup
test_list = self.proxy_list

# Helper to remove password protection
password_reset_lockfile = "{}/removepassword.lock".format(self.datastore_path)
if path.isfile(password_reset_lockfile):
@@ -116,14 +133,6 @@ class ChangeDetectionStore:
secret = secrets.token_hex(16)
self.__data['settings']['application']['api_access_token'] = secret

# Proxy list support - available as a selection in settings when text file is imported
proxy_list_file = "{}/proxies.json".format(self.datastore_path)
if path.isfile(proxy_list_file):
self.import_proxy_list(proxy_list_file)

# Bump the update version by running updates
self.run_updates()

self.needs_write = True

# Finally start the thread that will manage periodic data saves to JSON
@@ -175,66 +184,50 @@ class ChangeDetectionStore:

@property
def data(self):
has_unviewed = False
for uuid, watch in self.__data['watching'].items():
# #106 - Be sure this is None on empty string, False, None, etc
# Default var for fetch_backend
# @todo this may not be needed anymore, or could be easily removed
if not self.__data['watching'][uuid]['fetch_backend']:
self.__data['watching'][uuid]['fetch_backend'] = self.__data['settings']['application']['fetch_backend']
# Re #152, Return env base_url if not overriden
# Re #148 - Some people have just {{ base_url }} in the body or title, but this may break some notification services
# like 'Join', so it's always best to atleast set something obvious so that they are not broken.

# Re #152, Return env base_url if not overriden, @todo also prefer the proxy pass url
env_base_url = os.getenv('BASE_URL','')
if not self.__data['settings']['application']['base_url']:
self.__data['settings']['application']['base_url'] = env_base_url.strip('" ')
active_base_url = BASE_URL_NOT_SET_TEXT
if self.__data['settings']['application'].get('base_url'):
active_base_url = self.__data['settings']['application'].get('base_url')
elif os.getenv('BASE_URL'):
active_base_url = os.getenv('BASE_URL')

return self.__data

def get_all_tags(self):
tags = []
for uuid, watch in self.data['watching'].items():
if watch['tag'] is None:
continue
# Support for comma separated list of tags.
for tag in watch['tag'].split(','):
tag = tag.strip()
if tag not in tags:
tags.append(tag)

tags.sort()
return tags

def unlink_history_file(self, path):
try:
unlink(path)
except (FileNotFoundError, IOError):
pass
# I looked at various ways todo the following, but in the end just copying the dict seemed simplest/most reliable
# even given the memory tradeoff - if you know a better way.. maybe return d|self.__data.. or something
d = self.__data
d['settings']['application']['active_base_url'] = active_base_url.strip('" ')
return d
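The change above stops writing the BASE_URL environment value back into settings and instead recalculates a read-only active_base_url on every access of the data property. A minimal sketch of the resolution order it implements, assuming a standalone helper (the function name is mine, purely for illustration):

    import os

    BASE_URL_NOT_SET_TEXT = '("Base URL" not set - see settings - notifications)'

    def resolve_active_base_url(settings_base_url):
        # 1. Prefer the value configured in application settings
        if settings_base_url:
            return settings_base_url.strip('" ')
        # 2. Fall back to the BASE_URL environment variable
        if os.getenv('BASE_URL'):
            return os.getenv('BASE_URL').strip('" ')
        # 3. Otherwise expose an obvious placeholder so notification links are not silently broken
        return BASE_URL_NOT_SET_TEXT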
# Delete a single watch by UUID
def delete(self, uuid):
import pathlib
import shutil

with self.lock:
if uuid == 'all':
self.__data['watching'] = {}

# GitHub #30 also delete history records
for uuid in self.data['watching']:
for path in self.data['watching'][uuid].history.values():
self.unlink_history_file(path)
path = pathlib.Path(os.path.join(self.datastore_path, uuid))
if os.path.exists(path):
shutil.rmtree(path)

else:
for path in self.data['watching'][uuid].history.values():
self.unlink_history_file(path)

path = pathlib.Path(os.path.join(self.datastore_path, uuid))
if os.path.exists(path):
shutil.rmtree(path)
del self.data['watching'][uuid]

self.needs_write_urgent = True
self.needs_write_urgent = True

# Clone a watch by UUID
def clone(self, uuid):
url = self.data['watching'][uuid]['url']
tag = self.data['watching'][uuid]['tag']
url = self.data['watching'][uuid].get('url')
extras = self.data['watching'][uuid]
new_uuid = self.add_watch(url=url, tag=tag, extras=extras)
new_uuid = self.add_watch(url=url, extras=extras)
return new_uuid

def url_exists(self, url):
@@ -250,12 +243,19 @@ class ChangeDetectionStore:
def clear_watch_history(self, uuid):
import pathlib

self.__data['watching'][uuid].update(
{'last_checked': 0,
'last_viewed': 0,
'previous_md5': False,
'last_notification_error': False,
'last_error': False})
self.__data['watching'][uuid].update({
'browser_steps_last_error_step' : None,
'check_count': 0,
'fetch_time' : 0.0,
'has_ldjson_price_data': None,
'last_checked': 0,
'last_error': False,
'last_notification_error': False,
'last_viewed': 0,
'previous_md5': False,
'previous_md5_before_filters': False,
'track_ldjson_price_data': None,
})

# JSON Data, Screenshots, Textfiles (history index and snapshots), HTML in the future etc
for item in pathlib.Path(os.path.join(self.datastore_path, uuid)).rglob("*.*"):
@@ -266,16 +266,14 @@ class ChangeDetectionStore:

self.needs_write_urgent = True

def add_watch(self, url, tag="", extras=None, write_to_disk_now=True):
def add_watch(self, url, tag='', extras=None, tag_uuids=None, write_to_disk_now=True):

if extras is None:
extras = {}
# should always be str
if tag is None or not tag:
tag = ''

# Incase these are copied across, assume it's a reference and deepcopy()
apply_extras = deepcopy(extras)
apply_extras['tags'] = [] if not apply_extras.get('tags') else apply_extras.get('tags')

# Was it a share link? try to fetch the data
if (url.startswith("https://changedetection.io/share/")):
@@ -289,6 +287,7 @@ class ChangeDetectionStore:
# List of permissible attributes we accept from the wild internet
for k in [
'body',
'browser_steps',
'css_filter',
'extract_text',
'extract_title_as_title',
@@ -298,8 +297,10 @@ class ChangeDetectionStore:
'method',
'paused',
'previous_md5',
'processor',
'subtractive_selectors',
'tag',
'tags',
'text_should_not_be_present',
'title',
'trigger_text',
@@ -317,30 +318,50 @@ class ChangeDetectionStore:
logging.error("Error fetching metadata for shared watch link", url, str(e))
flash("Error fetching metadata for {}".format(url), 'error')
return False
from .model.Watch import is_safe_url
if not is_safe_url(url):
flash('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX', 'error')
return None

with self.lock:
if tag and type(tag) == str:
# Then it's probably a string of the actual tag by name, split and add it
for t in tag.split(','):
# for each stripped tag, add tag as UUID
for a_t in t.split(','):
tag_uuid = self.add_tag(a_t)
apply_extras['tags'].append(tag_uuid)

# #Re 569
new_watch = Watch.model(datastore_path=self.datastore_path, default={
'url': url,
'tag': tag
})
# Or if UUIDs given directly
if tag_uuids:
apply_extras['tags'] = list(set(apply_extras['tags'] + tag_uuids))

new_uuid = new_watch['uuid']
logging.debug("Added URL {} - {}".format(url, new_uuid))
# Make any uuids unique
if apply_extras.get('tags'):
apply_extras['tags'] = list(set(apply_extras.get('tags')))

for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
if k in apply_extras:
del apply_extras[k]
new_watch = Watch.model(datastore_path=self.datastore_path, url=url)

new_watch.update(apply_extras)
self.__data['watching'][new_uuid] = new_watch
new_uuid = new_watch.get('uuid')

logging.debug("Added URL {} - {}".format(url, new_uuid))

for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
if k in apply_extras:
del apply_extras[k]

if not apply_extras.get('date_created'):
apply_extras['date_created'] = int(time.time())

new_watch.update(apply_extras)
new_watch.ensure_data_dir_exists()
self.__data['watching'][new_uuid] = new_watch

self.__data['watching'][new_uuid].ensure_data_dir_exists()

if write_to_disk_now:
self.sync_to_json()

print("added ", url)

return new_uuid
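A hedged usage sketch of the new tag handling in add_watch: a comma-separated tag string is split and each name is resolved to a tag UUID via add_tag(), or already-resolved UUIDs can be passed through the new tag_uuids parameter. The datastore variable, URLs and tag names below are invented for illustration and are not part of the diff:

    # Assuming `datastore` is an initialised ChangeDetectionStore (illustrative only)
    uuid_a = datastore.add_watch(url='https://example.com/products',
                                 tag='Tech news, price-watch')      # names become tag UUIDs via add_tag()

    # Or reference existing tag UUIDs directly, skipping the name lookup
    uuid_b = datastore.add_watch(url='https://example.com/blog',
                                 tag_uuids=[datastore.add_tag('Tech news')])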
def visualselector_data_is_ready(self, watch_uuid):
@@ -368,28 +389,25 @@ class ChangeDetectionStore:
f.write(screenshot)
f.close()

# Make a JPEG that's used in notifications (due to being a smaller size) available
from PIL import Image
im1 = Image.open(target_path)
im1.convert('RGB').save(target_path.replace('.png','.jpg'), quality=int(os.getenv("NOTIFICATION_SCREENSHOT_JPG_QUALITY", 75)))


def save_error_text(self, watch_uuid, contents):
if not self.data['watching'].get(watch_uuid):
return
target_path = os.path.join(self.datastore_path, watch_uuid, "last-error.txt")

self.data['watching'][watch_uuid].ensure_data_dir_exists()
target_path = os.path.join(self.datastore_path, watch_uuid, "last-error.txt")
with open(target_path, 'w') as f:
f.write(contents)

def save_xpath_data(self, watch_uuid, data, as_error=False):

if not self.data['watching'].get(watch_uuid):
return
if as_error:
target_path = os.path.join(self.datastore_path, watch_uuid, "elements-error.json")
else:
target_path = os.path.join(self.datastore_path, watch_uuid, "elements.json")

self.data['watching'][watch_uuid].ensure_data_dir_exists()
with open(target_path, 'w') as f:
f.write(json.dumps(data))
f.close()
@@ -459,10 +477,30 @@ class ChangeDetectionStore:
print ("Removing",item)
unlink(item)

def import_proxy_list(self, filename):
with open(filename) as f:
self.proxy_list = json.load(f)
print ("Registered proxy list", list(self.proxy_list.keys()))
@property
def proxy_list(self):
proxy_list = {}
proxy_list_file = os.path.join(self.datastore_path, 'proxies.json')

# Load from external config file
if path.isfile(proxy_list_file):
with open("{}/proxies.json".format(self.datastore_path)) as f:
proxy_list = json.load(f)

# Mapping from UI config if available
extras = self.data['settings']['requests'].get('extra_proxies')
if extras:
i=0
for proxy in extras:
i += 0
if proxy.get('proxy_name') and proxy.get('proxy_url'):
k = "ui-" + str(i) + proxy.get('proxy_name')
proxy_list[k] = {'label': proxy.get('proxy_name'), 'url': proxy.get('proxy_url')}

if proxy_list and strtobool(os.getenv('ENABLE_NO_PROXY_OPTION', 'True')):
proxy_list["no-proxy"] = {'label': "No proxy", 'url': ''}

return proxy_list if len(proxy_list) else None
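The new proxy_list property merges an optional datastore-level proxies.json with any extra proxies configured in the UI. Assuming the file uses the same label/url shape that the UI-mapped entries are given above (an assumption on my part, the diff only shows the UI mapping), a datastore proxies.json might look like this; all names, hosts and credentials are invented:

    import json

    # Hypothetical proxies.json contents (shape mirrors the UI-mapped entries above)
    example_proxies = {
        "proxy-one": {"label": "UK datacenter", "url": "http://user:pass@proxy-one.example.com:3128"},
        "proxy-two": {"label": "Residential", "url": "socks5://proxy-two.example.com:1080"},
    }

    with open("/datastore/proxies.json", "w") as f:
        json.dump(example_proxies, f, indent=4)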
def get_preferred_proxy_for_watch(self, uuid):
@@ -472,13 +510,15 @@ class ChangeDetectionStore:
:return: proxy "key" id
"""

proxy_id = None
if self.proxy_list is None:
return None

# If its a valid one
# If it's a valid one
watch = self.data['watching'].get(uuid)

if strtobool(os.getenv('ENABLE_NO_PROXY_OPTION', 'True')) and watch.get('proxy') == "no-proxy":
return None

if watch.get('proxy') and watch.get('proxy') in list(self.proxy_list.keys()):
return watch.get('proxy')

@@ -489,21 +529,115 @@ class ChangeDetectionStore:
if self.proxy_list.get(system_proxy_id):
return system_proxy_id

# Fallback - Did not resolve anything, use the first available
if system_proxy_id is None:

# Fallback - Did not resolve anything, or doesnt exist, use the first available
if system_proxy_id is None or not self.proxy_list.get(system_proxy_id):
first_default = list(self.proxy_list)[0]
return first_default

return None
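Reading the two hunks together, the resolution order for a watch's proxy appears to be: the watch's own proxy key (unless it is the explicit "no-proxy" choice), then the system-wide default, then the first configured proxy. A condensed sketch of that order; the standalone function and argument names are mine, for illustration only:

    def pick_proxy(watch_proxy, system_proxy_id, proxy_list, no_proxy_enabled=True):
        if not proxy_list:
            return None                      # no proxies configured at all
        if no_proxy_enabled and watch_proxy == "no-proxy":
            return None                      # the watch explicitly opted out
        if watch_proxy and watch_proxy in proxy_list:
            return watch_proxy               # per-watch override
        if system_proxy_id and system_proxy_id in proxy_list:
            return system_proxy_id           # system-wide default
        return next(iter(proxy_list))        # fallback: first available key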
# Run all updates
# IMPORTANT - Each update could be run even when they have a new install and the schema is correct
# So therefor - each `update_n` should be very careful about checking if it needs to actually run
# Probably we should bump the current update schema version with each tag release version?
def run_updates(self):
import inspect
import shutil
@property
def has_extra_headers_file(self):
filepath = os.path.join(self.datastore_path, 'headers.txt')
return os.path.isfile(filepath)

def get_all_base_headers(self):
from .model.App import parse_headers_from_text_file
headers = {}
# Global app settings
headers.update(self.data['settings'].get('headers', {}))

return headers

def get_all_headers_in_textfile_for_watch(self, uuid):
from .model.App import parse_headers_from_text_file
headers = {}

# Global in /datastore/headers.txt
filepath = os.path.join(self.datastore_path, 'headers.txt')
try:
if os.path.isfile(filepath):
headers.update(parse_headers_from_text_file(filepath))
except Exception as e:
print(f"ERROR reading headers.txt at {filepath}", str(e))

watch = self.data['watching'].get(uuid)
if watch:

# In /datastore/xyz-xyz/headers.txt
filepath = os.path.join(watch.watch_data_dir, 'headers.txt')
try:
if os.path.isfile(filepath):
headers.update(parse_headers_from_text_file(filepath))
except Exception as e:
print(f"ERROR reading headers.txt at {filepath}", str(e))

# In /datastore/tag-name.txt
tags = self.get_all_tags_for_watch(uuid=uuid)
for tag_uuid, tag in tags.items():
fname = "headers-"+re.sub(r'[\W_]', '', tag.get('title')).lower().strip() + ".txt"
filepath = os.path.join(self.datastore_path, fname)
try:
if os.path.isfile(filepath):
headers.update(parse_headers_from_text_file(filepath))
except Exception as e:
print(f"ERROR reading headers.txt at {filepath}", str(e))

return headers
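The implementation of parse_headers_from_text_file is not part of this diff; assuming it follows the usual one "Name: value" pair per line convention, a minimal sketch of such a parser could look like the following. This is illustrative only and is not the project's actual code:

    def parse_headers_from_text_file_sketch(filepath):
        headers = {}
        with open(filepath, 'r') as f:
            for line in f:
                line = line.strip()
                # Skip blanks and anything without a "Name: value" separator
                if not line or ':' not in line:
                    continue
                name, value = line.split(':', 1)
                headers[name.strip()] = value.strip()
        return headers

    # e.g. a headers.txt containing "User-Agent: Mozilla/5.0" and "Cookie: session=abc"
    # would yield {'User-Agent': 'Mozilla/5.0', 'Cookie': 'session=abc'}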
def get_tag_overrides_for_watch(self, uuid, attr):
tags = self.get_all_tags_for_watch(uuid=uuid)
ret = []

if tags:
for tag_uuid, tag in tags.items():
if attr in tag and tag[attr]:
ret=[*ret, *tag[attr]]

return ret

def add_tag(self, name):
# If name exists, return that
n = name.strip().lower()
print (f">>> Adding new tag - '{n}'")
if not n:
return False

for uuid, tag in self.__data['settings']['application'].get('tags', {}).items():
if n == tag.get('title', '').lower().strip():
print (f">>> Tag {name} already exists")
return uuid

# Eventually almost everything todo with a watch will apply as a Tag
# So we use the same model as a Watch
with self.lock:
new_tag = Watch.model(datastore_path=self.datastore_path, default={
'title': name.strip(),
'date_created': int(time.time())
})

new_uuid = new_tag.get('uuid')

self.__data['settings']['application']['tags'][new_uuid] = new_tag

return new_uuid

def get_all_tags_for_watch(self, uuid):
"""This should be in Watch model but Watch doesn't have access to datastore, not sure how to solve that yet"""
watch = self.data['watching'].get(uuid)

# Should return a dict of full tag info linked by UUID
if watch:
return dictfilt(self.__data['settings']['application']['tags'], watch.get('tags', []))

return {}

def tag_exists_by_name(self, tag_name):
return any(v.get('title', '').lower() == tag_name.lower() for k, v in self.__data['settings']['application']['tags'].items())

def get_updates_available(self):
import inspect
updates_available = []
for i, o in inspect.getmembers(self, predicate=inspect.ismethod):
m = re.search(r'update_(\d+)$', i)
@@ -511,6 +645,15 @@ class ChangeDetectionStore:
updates_available.append(int(m.group(1)))
updates_available.sort()

return updates_available

# Run all updates
# IMPORTANT - Each update could be run even when they have a new install and the schema is correct
# So therefor - each `update_n` should be very careful about checking if it needs to actually run
# Probably we should bump the current update schema version with each tag release version?
def run_updates(self):
import shutil
updates_available = self.get_updates_available()
for update_n in updates_available:
if update_n > self.__data['settings']['application']['schema_version']:
print ("Applying update_{}".format((update_n)))
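The schema migration machinery discovers update_N methods by reflection: any method whose name matches update_(\d+)$ is a candidate, the numbers are sorted, and only those above the stored schema_version are applied. A small self-contained sketch of that discovery pattern; the class and the updates in it are invented for illustration:

    import inspect
    import re

    class MigratableStore:
        schema_version = 12

        def update_12(self):
            print("already applied")

        def update_13(self):
            print("backfill date_created")

        def run_updates(self):
            available = []
            for name, _method in inspect.getmembers(self, predicate=inspect.ismethod):
                m = re.search(r'update_(\d+)$', name)
                if m:
                    available.append(int(m.group(1)))
            for n in sorted(available):
                if n > self.schema_version:
                    getattr(self, "update_{}".format(n))()
                    self.schema_version = n

    MigratableStore().run_updates()   # only update_13 runs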
@@ -620,4 +763,76 @@ class ChangeDetectionStore:
watch['include_filters'] = [existing_filter]
except:
continue
return

# Convert old static notification tokens to jinja2 tokens
def update_9(self):
# Each watch
import re
# only { } not {{ or }}
r = r'(?<!{){(?!{)(\w+)(?<!})}(?!})'
for uuid, watch in self.data['watching'].items():
try:
n_body = watch.get('notification_body', '')
if n_body:
watch['notification_body'] = re.sub(r, r'{{\1}}', n_body)

n_title = watch.get('notification_title')
if n_title:
watch['notification_title'] = re.sub(r, r'{{\1}}', n_title)

n_urls = watch.get('notification_urls')
if n_urls:
for i, url in enumerate(n_urls):
watch['notification_urls'][i] = re.sub(r, r'{{\1}}', url)

except:
continue

# System wide
n_body = self.data['settings']['application'].get('notification_body')
if n_body:
self.data['settings']['application']['notification_body'] = re.sub(r, r'{{\1}}', n_body)

n_title = self.data['settings']['application'].get('notification_title')
if n_body:
self.data['settings']['application']['notification_title'] = re.sub(r, r'{{\1}}', n_title)

n_urls = self.data['settings']['application'].get('notification_urls')
if n_urls:
for i, url in enumerate(n_urls):
self.data['settings']['application']['notification_urls'][i] = re.sub(r, r'{{\1}}', url)

return
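The lookaround regex in update_9 upgrades only single-brace tokens and leaves anything already in Jinja2 form untouched. A quick check of that behaviour; the sample strings are invented:

    import re

    r = r'(?<!{){(?!{)(\w+)(?<!})}(?!})'

    print(re.sub(r, r'{{\1}}', "Change detected in {watch_url}"))
    # -> Change detected in {{watch_url}}

    print(re.sub(r, r'{{\1}}', "Already migrated: {{diff}}"))
    # -> Already migrated: {{diff}}   (double braces are left alone)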
# Some setups may have missed the correct default, so it shows the wrong config in the UI, although it will default to system-wide
def update_10(self):
for uuid, watch in self.data['watching'].items():
try:
if not watch.get('fetch_backend', ''):
watch['fetch_backend'] = 'system'
except:
continue
return

# Create tag objects and their references from existing tag text
def update_12(self):
i = 0
for uuid, watch in self.data['watching'].items():
# Split out and convert old tag string
tag = watch.get('tag')
if tag:
tag_uuids = []
for t in tag.split(','):
tag_uuids.append(self.add_tag(name=t))

self.data['watching'][uuid]['tags'] = tag_uuids

# #1775 - Update 11 did not update the records correctly when adding 'date_created' values for sorting
def update_13(self):
i = 0
for uuid, watch in self.data['watching'].items():
if not watch.get('date_created'):
self.data['watching'][uuid]['date_created'] = i
i+=1
return
@@ -13,17 +13,20 @@
<div class="pure-form-message-inline">
<ul>
<li>Use <a target=_new href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.</li>
<li><code>discord://</code> only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
<li><code>tgram://</code> bots cant send messages to other bots, so you should specify chat ID of non-bot user.</li>
<li><code>tgram://</code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li>
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_discord">discord://</a></code> (or <code>https://discord.com/api/webhooks...</code>)) </code> only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> bots can't send messages to other bots, so you should specify chat ID of non-bot user.</li>
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li>
<li><code>gets://</code>, <code>posts://</code>, <code>puts://</code>, <code>deletes://</code> for direct API calls (or omit the "<code>s</code>" for non-SSL ie <code>get://</code>)</li>
<li>Accepts the <code>{{ '{{token}}' }}</code> placeholders listed below</li>
</ul>
</div>
<br/>
<a id="send-test-notification" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Send test notification</a>
{% if emailprefix %}
<a id="add-email-helper" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Add email</a>
{% endif %}
<a href="{{url_for('notification_logs')}}" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Notification debug logs</a>
<div class="notifications-wrapper">
<a id="send-test-notification" class="pure-button button-secondary button-xsmall" >Send test notification</a>
{% if emailprefix %}
<a id="add-email-helper" class="pure-button button-secondary button-xsmall" >Add email <img style="height: 1em; display: inline-block" src="{{url_for('static_content', group='images', filename='email.svg')}}" alt="Add an email address"> </a>
{% endif %}
<a href="{{url_for('notification_logs')}}" class="pure-button button-secondary button-xsmall" >Notification debug logs</a>
</div>
</div>
<div id="notification-customisation" class="pure-control-group">
<div class="pure-control-group">
@@ -32,17 +35,14 @@
</div>
<div class="pure-control-group">
{{ render_field(form.notification_body , rows=5, class="notification-body", placeholder=settings_application['notification_body']) }}
<span class="pure-form-message-inline">Body for all notifications</span>
</div>
<div class="pure-control-group">
<!-- unsure -->
{{ render_field(form.notification_format , class="notification-format") }}
<span class="pure-form-message-inline">Format for all notifications</span>
<span class="pure-form-message-inline">Body for all notifications ‐ You can use <a target="_new" href="https://jinja.palletsprojects.com/en/3.0.x/templates/">Jinja2</a> templating in the notification title, body and URL, and tokens from below.
</span>

</div>
<div class="pure-controls">
<span class="pure-form-message-inline">
These tokens can be used in the notification body and title to customise the notification text.

<div id="notification-token-toggle" class="pure-button button-tag button-xsmall">Show token/placeholders</div>
</div>
<div class="pure-controls" style="display: none;" id="notification-tokens-info">
<table class="pure-table" id="token-table">
<thead>
<tr>
@@ -52,52 +52,74 @@
</thead>
<tbody>
<tr>
<td><code>{base_url}</code></td>
<td><code>{{ '{{base_url}}' }}</code></td>
<td>The URL of the changedetection.io instance you are running.</td>
</tr>
<tr>
<td><code>{watch_url}</code></td>
<td><code>{{ '{{watch_url}}' }}</code></td>
<td>The URL being watched.</td>
</tr>
<tr>
<td><code>{watch_uuid}</code></td>
<td><code>{{ '{{watch_uuid}}' }}</code></td>
<td>The UUID of the watch.</td>
</tr>
<tr>
<td><code>{watch_title}</code></td>
<td><code>{{ '{{watch_title}}' }}</code></td>
<td>The title of the watch.</td>
</tr>
<tr>
<td><code>{watch_tag}</code></td>
<td>The tag of the watch.</td>
<td><code>{{ '{{watch_tag}}' }}</code></td>
<td>The watch label / tag</td>
</tr>
<tr>
<td><code>{preview_url}</code></td>
<td><code>{{ '{{preview_url}}' }}</code></td>
<td>The URL of the preview page generated by changedetection.io.</td>
</tr>
<tr>
<td><code>{diff}</code></td>
<td>The diff output - differences only</td>
<td><code>{{ '{{diff_url}}' }}</code></td>
<td>The URL of the diff output for the watch.</td>
</tr>
<tr>
<td><code>{{ '{{diff}}' }}</code></td>
<td>The diff output - only changes, additions, and removals</td>
</tr>
<tr>
<td><code>{{ '{{diff_added}}' }}</code></td>
<td>The diff output - only changes and additions</td>
</tr>
<tr>
<td><code>{{ '{{diff_removed}}' }}</code></td>
<td>The diff output - only changes and removals</td>
</tr>
<tr>
<td><code>{diff_full}</code></td>
<td><code>{{ '{{diff_full}}' }}</code></td>
<td>The diff output - full difference output</td>
</tr>
<tr>
<td><code>{diff_url}</code></td>
<td>The URL of the diff page generated by changedetection.io.</td>
<td><code>{{ '{{diff_patch}}' }}</code></td>
<td>The diff output - patch in unified format</td>
</tr>
<tr>
<td><code>{current_snapshot}</code></td>
<td>The current snapshot value, useful when combined with JSON or CSS filters
<td><code>{{ '{{current_snapshot}}' }}</code></td>
<td>The current snapshot text contents value, useful when combined with JSON or CSS filters
</td>
</tr>
<tr>
<td><code>{{ '{{triggered_text}}' }}</code></td>
<td>Text that tripped the trigger from filters</td>
</tr>
</tbody>
</table>
<br/>
URLs generated by changedetection.io (such as <code>{diff_url}</code>) require the <code>BASE_URL</code> environment variable set.<br/>
Your <code>BASE_URL</code> var is currently "{{settings_application['current_base_url']}}"
</span>
<div class="pure-form-message-inline">
<p>
Warning: Contents of <code>{{ '{{diff}}' }}</code>, <code>{{ '{{diff_removed}}' }}</code>, and <code>{{ '{{diff_added}}' }}</code> depend on how the difference algorithm perceives the change. <br>
For example, an addition or removal could be perceived as a change in some cases. <a target="_new" href="https://github.com/dgtlmoon/changedetection.io/wiki/Using-the-%7B%7Bdiff%7D%7D,-%7B%7Bdiff_added%7D%7D,-and-%7B%7Bdiff_removed%7D%7D-notification-tokens">More Here</a> <br>
</p>
</div>
</div>
<div class="pure-control-group">
{{ render_field(form.notification_format , class="notification-format") }}
<span class="pure-form-message-inline">Format for all notifications</span>
</div>
</div>
{% endmacro %}
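Since the notification title, body and URLs are now passed through Jinja2, the tokens in the table above behave like ordinary template variables; the app performs this substitution internally when sending notifications. A rough illustration of the substitution using the jinja2 library directly, with invented sample values:

    from jinja2 import Template

    body = Template("{{watch_title}} changed!\n{{watch_url}}\n\n{{diff}}\n\nSee {{diff_url}}")
    print(body.render(watch_title="Hacker News front page",
                      watch_url="https://news.ycombinator.com/",
                      diff="(added) Example new headline",
                      diff_url="https://changedetection.example.com/diff/1234"))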