Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-04 16:45:57 +00:00)

Compare commits: check-pip-... → 0.45.3 (134 commits)
Commits in this comparison (SHA1):

59578803bf, a5db3a0b99, 49a5337ac4, ceac8c21e4, a7132b1cfc, 2b948c15c1, 34f2d30968, 700729a332, b6060ac90c, 5cccccb0b6,
c52eb512e8, 7282df9c08, e30b17b8bc, 1e88136325, 57de4ffe4f, 51e2e8a226, 8887459462, 460c724e51, dcf4bf37ed, e3cf22fc27,
d497db639e, 7355ac8d21, 2f2d0ea0f2, a958e1fe20, 5dc3b00ec6, 8ac4757cd9, 2180bb256d, 212f15ad5f, 22b2068208, 4916043055,
7bf13bad30, 0aa2276afb, 3b875e5a6a, 8ec50294d2, e3c9255d9e, 3b03bdcb82, e25792bcec, bf4168a2aa, 9d37eaa57b, 40d01acde9,
d34832de73, ed4bafae63, 3a5bceadfa, 6abdf2d332, dee23709a9, 52df3b10e7, 087d21c61e, 171faf465c, a3d8bd0b1a, 6ef8a1c18f,
126f0fbf87, cfa712c88c, 6a6ba40b6a, e7f726c057, df0cc7b585, 76cd98b521, f84ba0fb31, c35cbd33d6, 661f7fe32c, 7cb7eebbc5,
aaceb4ebad, 56cf6e5ea5, 1987e109e8, 20d65cdd26, 37ff5f6d37, 2f777ea3bb, e709201955, 572f71299f, 5f150c4f03, 8cbf8e8f57,
0e65dda5b6, 72a415144b, 52f2c00308, 72311fb845, f1b10a22f8, a4c620c308, 9434eac72d, edb5e20de6, e62eeb1c4a, a4e6fd1ec3,
d8b9f0fd78, f9387522ee, ba8d2e0c2d, 247db22a33, aeabd5b3fc, e9e1ce893f, b5a415c7b6, 9e954532d6, 955835df72, 1aeafef910,
1367197df7, 143971123d, 04d2d3fb00, 236f0c098d, 582c6b465b, a021ba87fa, e9057cb851, 72ec438caa, 367dec48e1, dd87912c88,
0126cb0aac, 463b2d0449, e4f6d54ae2, 5f338d7824, 0b563a93ec, d939882dde, 690cf4acc9, 3cb3c7ba2e, 5325918f29, 8eee913438,
06921d973e, 316f28a0f2, 3801d339f5, d814535dc6, cf3f3e4497, ba76c2a280, 94f38f052e, 1710885fc4, 2018e73240, fae8c89a4e,
40988c55c6, 5aa713b7ea, e1f5dfb703, 966600d28e, e7ac356d99, e874df4ffc, d1f44d0345, 8536af0845, 9076ba6bd3, 43af18e2bc,
ad75e8cdd0, f604643356, d5fd22f693, 1d9d11b3f5
.dockerignore

@@ -1,2 +1,18 @@
 .git
 .github
+changedetectionio/processors/__pycache__
+changedetectionio/api/__pycache__
+changedetectionio/model/__pycache__
+changedetectionio/blueprint/price_data_follower/__pycache__
+changedetectionio/blueprint/tags/__pycache__
+changedetectionio/blueprint/__pycache__
+changedetectionio/blueprint/browser_steps/__pycache__
+changedetectionio/fetchers/__pycache__
+changedetectionio/tests/visualselector/__pycache__
+changedetectionio/tests/restock/__pycache__
+changedetectionio/tests/__pycache__
+changedetectionio/tests/fetchers/__pycache__
+changedetectionio/tests/unit/__pycache__
+changedetectionio/tests/proxy_list/__pycache__
+changedetectionio/__pycache__
+
.github/test/Dockerfile-alpine (4 changed lines)

@@ -2,7 +2,7 @@
 # Test that we can still build on Alpine (musl modified libc https://musl.libc.org/)
 # Some packages wont install via pypi because they dont have a wheel available under this architecture.

-FROM ghcr.io/linuxserver/baseimage-alpine:3.16
+FROM ghcr.io/linuxserver/baseimage-alpine:3.18
 ENV PYTHONUNBUFFERED=1

 COPY requirements.txt /requirements.txt
@@ -26,6 +26,6 @@ RUN \
   py3-pip && \
   echo "**** pip3 install test of changedetection.io ****" && \
   pip3 install -U pip wheel setuptools && \
-  pip3 install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.16/ -r /requirements.txt && \
+  pip3 install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.18/ -r /requirements.txt && \
   apk del --purge \
   build-dependencies
.github/workflows/codeql-analysis.yml (8 changed lines)

@@ -30,11 +30,11 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v1
+        uses: github/codeql-action/init@v2
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.
@@ -45,7 +45,7 @@ jobs:
       # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@v1
+        uses: github/codeql-action/autobuild@v2

       # ℹ️ Command-line programs to run using the OS shell.
       # 📚 https://git.io/JvXDl
@@ -59,4 +59,4 @@ jobs:
       # make release

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v1
+        uses: github/codeql-action/analyze@v2
.github/workflows/containers.yml (26 changed lines)

@@ -39,11 +39,11 @@ jobs:
     # Or if we are in a tagged release scenario.
     if: ${{ github.event.workflow_run.conclusion == 'success' }} || ${{ github.event.release.tag_name }} != ''
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
-      - name: Set up Python 3.9
+      - name: Set up Python 3.11
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
         with:
-          python-version: 3.9
+          python-version: 3.11

       - name: Install dependencies
         run: |
@@ -58,27 +58,27 @@ jobs:
           echo ${{ github.ref }} > changedetectionio/tag.txt

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
+        uses: docker/setup-qemu-action@v3
         with:
           image: tonistiigi/binfmt:latest
           platforms: all

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v1
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Login to Docker Hub Container Registry
-        uses: docker/login-action@v1
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKER_HUB_USERNAME }}
           password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@v1
+        uses: docker/setup-buildx-action@v3
         with:
           install: true
           version: latest
@@ -88,14 +88,14 @@ jobs:
       - name: Build and push :dev
         id: docker_build
         if: ${{ github.ref }} == "refs/heads/master"
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v5
         with:
           context: ./
           file: ./Dockerfile
           push: true
           tags: |
             ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:dev,ghcr.io/${{ github.repository }}:dev
-          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
+          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
           cache-from: type=local,src=/tmp/.buildx-cache
           cache-to: type=local,dest=/tmp/.buildx-cache
           # Looks like this was disabled
@@ -105,7 +105,7 @@ jobs:
       - name: Build and push :tag
         id: docker_build_tag_release
         if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.')
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v5
         with:
           context: ./
           file: ./Dockerfile
@@ -115,7 +115,7 @@ jobs:
             ghcr.io/dgtlmoon/changedetection.io:${{ github.event.release.tag_name }}
             ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest
             ghcr.io/dgtlmoon/changedetection.io:latest
-          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
+          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
           cache-from: type=local,src=/tmp/.buildx-cache
           cache-to: type=local,dest=/tmp/.buildx-cache
           # Looks like this was disabled
@@ -125,7 +125,7 @@ jobs:
         run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}

       - name: Cache Docker layers
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}
.github/workflows/test-container-build.yml (18 changed lines)

@@ -24,22 +24,22 @@ jobs:
   test-container-build:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
-      - name: Set up Python 3.9
+      - name: Set up Python 3.11
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
         with:
-          python-version: 3.9
+          python-version: 3.11

       # Just test that the build works, some libraries won't compile on ARM/rPi etc
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
+        uses: docker/setup-qemu-action@v3
         with:
           image: tonistiigi/binfmt:latest
           platforms: all

       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@v1
+        uses: docker/setup-buildx-action@v3
         with:
           install: true
           version: latest
@@ -49,7 +49,7 @@ jobs:
       # Check we can still build under alpine/musl
       - name: Test that the docker containers can build (musl via alpine check)
         id: docker_build_musl
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v5
         with:
           context: ./
           file: ./.github/test/Dockerfile-alpine
@@ -57,12 +57,12 @@ jobs:

       - name: Test that the docker containers can build
         id: docker_build
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v5
         # https://github.com/docker/build-push-action#customizing
         with:
           context: ./
           file: ./Dockerfile
-          platforms: linux/arm/v7,linux/arm/v6,linux/amd64,linux/arm64,
+          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
           cache-from: type=local,src=/tmp/.buildx-cache
           cache-to: type=local,dest=/tmp/.buildx-cache
.github/workflows/test-only.yml (35 changed lines)

@@ -7,13 +7,13 @@ jobs:
   test-application:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

       # Mainly just for link/flake8
-      - name: Set up Python 3.10
+      - name: Set up Python 3.11
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
         with:
-          python-version: '3.10'
+          python-version: '3.11'

       - name: Lint with flake8
         run: |
@@ -30,16 +30,22 @@ jobs:

           # Selenium+browserless
           docker run --network changedet-network -d --hostname selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome-debug:3.141.59
-          docker run --network changedet-network -d --hostname browserless -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.53-chrome-stable
+          docker run --network changedet-network -d --hostname browserless -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.53-chrome-stable

       - name: Build changedetection.io container for testing
         run: |
           # Build a changedetection.io container and start testing inside
           docker build . -t test-changedetectionio
+          # Debug info
+          docker run test-changedetectionio bash -c 'pip list'

+      - name: Spin up ancillary SMTP+Echo message test server
+        run: |
+          # Debug SMTP server/echo message back server
+          docker run --network changedet-network -d -p 11025:11025 -p 11080:11080 --hostname mailserver test-changedetectionio bash -c 'python changedetectionio/tests/smtp/smtp-test-server.py'

       - name: Test built container with pytest
         run: |

           # Unit tests
           docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_notification_diff'

@@ -55,9 +61,24 @@ jobs:
           # Playwright/Browserless fetch
           docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py'

+          # Settings headers playwright tests - Call back in from Browserless, check headers
+          docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
+          docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "WEBDRIVER_URL=http://selenium:4444/wd/hub" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
+          docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'

           # restock detection via playwright - added name=changedet here so that playwright/browserless can connect to it
           docker run --rm --name "changedet" -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-port=5004 --live-server-host=0.0.0.0 tests/restock/test_restock.py'

+      - name: Test SMTP notification mime types
+        run: |
+          # SMTP content types - needs the 'Debug SMTP server/echo message back server' container from above
+          docker run --rm --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/smtp/test_notification_smtp.py'

+      - name: Test with puppeteer fetcher and disk cache
+        run: |
+          docker run --rm -e "PUPPETEER_DISK_CACHE=/tmp/data/" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py'
+          # Browserless would have had -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" added above

       - name: Test proxy interaction
         run: |
           cd changedetectionio
@@ -77,4 +98,4 @@ jobs:

           #export WEBDRIVER_URL=http://localhost:4444/wd/hub
           #pytest tests/fetchers/test_content.py
           #pytest tests/test_errorhandling.py
.github/workflows/test-pip-build.yml (8 changed lines)

@@ -11,12 +11,12 @@ jobs:
   test-pip-build-basics:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

-      - name: Set up Python 3.9
+      - name: Set up Python 3.11
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
         with:
-          python-version: 3.9
+          python-version: 3.11


       - name: Test that the basic pip built package runs without error
Dockerfile

@@ -1,5 +1,5 @@
 # pip dependencies install stage
-FROM python:3.10-slim as builder
+FROM python:3.11-slim-bullseye as builder

 # See `cryptography` pin comment in requirements.txt
 ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1
@@ -29,7 +29,7 @@ RUN pip install --target=/dependencies playwright~=1.27.1 \
     || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."

 # Final image stage
-FROM python:3.10-slim
+FROM python:3.11-slim-bullseye

 RUN apt-get update && apt-get install -y --no-install-recommends \
     libssl1.1 \
MANIFEST.in

@@ -13,3 +13,6 @@ include changedetection.py
 global-exclude *.pyc
 global-exclude node_modules
 global-exclude venv
+
+global-exclude test-datastore
+global-exclude changedetection.io*dist-info
README-pypi.md

@@ -2,19 +2,44 @@

 Live your data-life pro-actively, track website content changes and receive notifications via Discord, Email, Slack, Telegram and 70+ more

-[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start?src=pip)
+[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring, list of websites with changes" title="Self-hosted web page change monitoring, list of websites with changes" />](https://changedetection.io)


-[**Don't have time? Let us host it for you! try our extremely affordable subscription use our proxies and support!**](https://lemonade.changedetection.io/start)
+[**Don't have time? Let us host it for you! try our extremely affordable subscription use our proxies and support!**](https://changedetection.io)


-#### Example use cases
+### Target specific parts of the webpage using the Visual Selector tool.
+
+Available when connected to a <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Playwright-content-fetcher">playwright content fetcher</a> (included as part of our subscription service)
+
+[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/visualselector-anim.gif" style="max-width:100%;" alt="Select parts and elements of a web page to monitor for changes" title="Select parts and elements of a web page to monitor for changes" />](https://changedetection.io?src=pip)
+
+### Easily see what changed, examine by word, line, or individual character.
+
+[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />](https://changedetection.io?src=pip)
+
+
+### Perform interactive browser steps
+
+Fill in text boxes, click buttons and more, setup your changedetection scenario.
+
+Using the **Browser Steps** configuration, add basic steps before performing change detection, such as logging into websites, adding a product to a cart, accept cookie logins, entering dates and refining searches.
+
+[<img src="docs/browsersteps-anim.gif" style="max-width:100%;" alt="Website change detection with interactive browser steps, detect changes behind login and password, search queries and more" title="Website change detection with interactive browser steps, detect changes behind login and password, search queries and more" />](https://changedetection.io?src=pip)
+
+After **Browser Steps** have been run, then visit the **Visual Selector** tab to refine the content you're interested in.
+Requires Playwright to be enabled.
+
+
+### Example use cases

 - Products and services have a change in pricing
 - _Out of stock notification_ and _Back In stock notification_
+- Monitor and track PDF file changes, know when a PDF file has text changes.
 - Governmental department updates (changes are often only on their websites)
 - New software releases, security advisories when you're not on their mailing list.
 - Festivals with changes
+- Discogs restock alerts and monitoring
 - Realestate listing changes
 - Know when your favourite whiskey is on sale, or other special deals are announced before anyone else
 - COVID related news from government websites
@@ -27,18 +52,34 @@ Live your data-life pro-actively, track website content changes and receive noti
 - Create RSS feeds based on changes in web content
 - Monitor HTML source code for unexpected changes, strengthen your PCI compliance
 - You have a very sensitive list of URLs to watch and you do _not_ want to use the paid alternatives. (Remember, _you_ are the product)
+- Get notified when certain keywords appear in Twitter search results
+- Proactively search for jobs, get notified when companies update their careers page, search job portals for keywords.
+- Get alerts when new job positions are open on Bamboo HR and other job platforms
+- Website defacement monitoring
+- Pokémon Card Restock Tracker / Pokémon TCG Tracker
+- RegTech - stay ahead of regulatory changes, regulatory compliance

 _Need an actual Chrome runner with Javascript support? We support fetching via WebDriver and Playwright!</a>_

 #### Key Features

 - Lots of trigger filters, such as "Trigger on text", "Remove text by selector", "Ignore text", "Extract text", also using regular-expressions!
-- Target elements with xPath and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
+- Target elements with xPath(1.0) and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
 - Switch between fast non-JS and Chrome JS based "fetchers"
+- Track changes in PDF files (Monitor text changed in the PDF, Also monitor PDF filesize and checksums)
 - Easily specify how often a site should be checked
 - Execute JS before extracting text (Good for logging in, see examples in the UI!)
 - Override Request Headers, Specify `POST` or `GET` and other methods
 - Use the "Visual Selector" to help target specific elements
+- Configurable [proxy per watch](https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration)
+- Send a screenshot with the notification when a change is detected in the web page
+
+We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $100 using our signup link.
+
+[Oxylabs](https://oxylabs.go2cloud.org/SH2d) is also an excellent proxy provider and well worth using, they offer Residental, ISP, Rotating and many other proxy types to suit your project.
+
+Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/
+

 ```bash
@@ -54,5 +95,5 @@ $ changedetection.io -d /path/to/empty/data/dir -p 5000

 Then visit http://127.0.0.1:5000 , You should now be able to access the UI.

-See https://github.com/dgtlmoon/changedetection.io for more information.
+See https://changedetection.io for more information.
README.md (27 changed lines)

@@ -5,13 +5,13 @@
 _Live your data-life pro-actively._


-[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start?src=github)
+[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web site page change monitoring" title="Self-hosted web site page change monitoring" />](https://changedetection.io?src=github)

 [![Release Version][release-shield]][release-link] [![Docker Pulls][docker-pulls]][docker-link] [![License][license-shield]](LICENSE.md)

 

-[**Don't have time? Let us host it for you! try our $8.99/month subscription - use our proxies and support!**](https://lemonade.changedetection.io/start) , _half the price of other website change monitoring services and comes with unlimited watches & checks!_
+[**Don't have time? Let us host it for you! try our $8.99/month subscription - use our proxies and support!**](https://changedetection.io) , _half the price of other website change monitoring services!_

 - Chrome browser included.
 - Super fast, no registration needed setup.
@@ -22,11 +22,11 @@ _Live your data-life pro-actively._

 Available when connected to a <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Playwright-content-fetcher">playwright content fetcher</a> (included as part of our subscription service)

-[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/visualselector-anim.gif" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />](https://lemonade.changedetection.io/start?src=github)
+[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/visualselector-anim.gif" style="max-width:100%;" alt="Select parts and elements of a web page to monitor for changes" title="Select parts and elements of a web page to monitor for changes" />](https://changedetection.io?src=github)

 ### Easily see what changed, examine by word, line, or individual character.

-[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />](https://lemonade.changedetection.io/start?src=github)
+[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />](https://changedetection.io?src=github)


 ### Perform interactive browser steps
@@ -35,7 +35,7 @@ Fill in text boxes, click buttons and more, setup your changedetection scenario.

 Using the **Browser Steps** configuration, add basic steps before performing change detection, such as logging into websites, adding a product to a cart, accept cookie logins, entering dates and refining searches.

-[<img src="docs/browsersteps-anim.gif" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Website change detection with interactive browser steps, login, cookies etc" />](https://lemonade.changedetection.io/start?src=github)
+[<img src="docs/browsersteps-anim.gif" style="max-width:100%;" alt="Website change detection with interactive browser steps, detect changes behind login and password, search queries and more" title="Website change detection with interactive browser steps, detect changes behind login and password, search queries and more" />](https://changedetection.io?src=github)

 After **Browser Steps** have been run, then visit the **Visual Selector** tab to refine the content you're interested in.
 Requires Playwright to be enabled.
@@ -65,13 +65,16 @@ Requires Playwright to be enabled.
 - Get notified when certain keywords appear in Twitter search results
 - Proactively search for jobs, get notified when companies update their careers page, search job portals for keywords.
 - Get alerts when new job positions are open on Bamboo HR and other job platforms
+- Website defacement monitoring
+- Pokémon Card Restock Tracker / Pokémon TCG Tracker
+- RegTech - stay ahead of regulatory changes, regulatory compliance

 _Need an actual Chrome runner with Javascript support? We support fetching via WebDriver and Playwright!</a>_

 #### Key Features

 - Lots of trigger filters, such as "Trigger on text", "Remove text by selector", "Ignore text", "Extract text", also using regular-expressions!
-- Target elements with xPath and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
+- Target elements with xPath(1.0) and CSS Selectors, Easily monitor complex JSON with JSONPath or jq
 - Switch between fast non-JS and Chrome JS based "fetchers"
 - Track changes in PDF files (Monitor text changed in the PDF, Also monitor PDF filesize and checksums)
 - Easily specify how often a site should be checked
@@ -83,6 +86,8 @@ _Need an actual Chrome runner with Javascript support? We support fetching via W

 We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $100 using our signup link.

+[Oxylabs](https://oxylabs.go2cloud.org/SH2d) is also an excellent proxy provider and well worth using, they offer Residental, ISP, Rotating and many other proxy types to suit your project.
+
 Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/

 ## Installation
@@ -142,8 +147,8 @@ See the wiki for more information https://github.com/dgtlmoon/changedetection.io

 ## Filters

-XPath, JSONPath, jq, and CSS support comes baked in! You can be as specific as you need, use XPath exported from various XPath element query creation tools.
-(We support LXML `re:test`, `re:math` and `re:replace`.)
+XPath(1.0), JSONPath, jq, and CSS support comes baked in! You can be as specific as you need, use XPath exported from various XPath element query creation tools.
+(We support LXML `re:test`, `re:match` and `re:replace`.)

 ## Notifications

@@ -181,7 +186,7 @@ This will re-parse the JSON and apply formatting to the text, making it super ea

 ### JSONPath or jq?

-For more complex parsing, filtering, and modifying of JSON data, jq is recommended due to the built-in operators and functions. Refer to the [documentation](https://stedolan.github.io/jq/manual/) for more specifc information on jq.
+For more complex parsing, filtering, and modifying of JSON data, jq is recommended due to the built-in operators and functions. Refer to the [documentation](https://stedolan.github.io/jq/manual/) for more specific information on jq.

 One big advantage of `jq` is that you can use logic in your JSON filter, such as filters to only show items that have a value greater than/less than etc.

@@ -221,7 +226,7 @@ The application also supports notifying you that it can follow this information

 ## Proxy Configuration

-See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration , we also support using [BrightData proxy services where possible]( https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support)
+See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration , we also support using [Bright Data proxy services where possible](https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support) and [Oxylabs](https://oxylabs.go2cloud.org/SH2d) proxy services.

 ## Raspberry Pi support?

@@ -236,7 +241,7 @@ Supports managing the website watch list [via our API](https://changedetection.i
 Do you use changedetection.io to make money? does it save you time or money? Does it make your life easier? less stressful? Remember, we write this software when we should be doing actual paid work, we have to buy food and pay rent just like you.


-Firstly, consider taking out a [change detection monthly subscription - unlimited checks and watches](https://lemonade.changedetection.io/start) , even if you don't use it, you still get the warm fuzzy feeling of helping out the project. (And who knows, you might just use it!)
+Firstly, consider taking out a [change detection monthly subscription - unlimited checks and watches](https://changedetection.io?src=github) , even if you don't use it, you still get the warm fuzzy feeling of helping out the project. (And who knows, you might just use it!)

 Or directly donate an amount PayPal [](https://www.paypal.com/donate/?hosted_button_id=7CP6HR9ZCNDYJ)
changedetectionio/__init__.py

@@ -33,10 +33,14 @@ from flask import (
     url_for,
 )

+from flask_paginate import Pagination, get_page_parameter
+
 from changedetectionio import html_tools
 from changedetectionio.api import api_v1

-__version__ = '0.41.1'
+__version__ = '0.45.3'

+from changedetectionio.store import BASE_URL_NOT_SET_TEXT
+
 datastore = None
@@ -122,6 +126,15 @@ def _jinja2_filter_datetimestamp(timestamp, format="%Y-%m-%d %H:%M:%S"):

     return timeago.format(timestamp, time.time())

+
+@app.template_filter('pagination_slice')
+def _jinja2_filter_pagination_slice(arr, skip):
+    per_page = datastore.data['settings']['application'].get('pager_size', 50)
+    if per_page:
+        return arr[skip:skip + per_page]
+
+    return arr
+
 @app.template_filter('format_seconds_ago')
 def _jinja2_filter_seconds_precise(timestamp):
     if timestamp == False:
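The new `pagination_slice` template filter hands back just one page of the watch list, based on the configured `pager_size`. Below is a minimal, self-contained sketch of the same idea against a bare Jinja2 environment; the `PER_PAGE` constant and the `watches`/`skip` names are illustrative stand-ins, not the project's real template variables.

```python
from jinja2 import Environment

PER_PAGE = 3  # stands in for settings['application'].get('pager_size', 50)

def pagination_slice(arr, skip):
    # Same shape as the filter added above: return one page worth of items,
    # starting at the offset computed for the requested page.
    if PER_PAGE:
        return arr[skip:skip + PER_PAGE]
    return arr

env = Environment()
env.filters['pagination_slice'] = pagination_slice

template = env.from_string(
    "{% for watch in watches | pagination_slice(skip) %}{{ watch }} {% endfor %}"
)

watches = [f"watch-{i}" for i in range(10)]
page = 2
skip = (page - 1) * PER_PAGE  # offset of the second page
print(template.render(watches=watches, skip=skip))  # -> watch-3 watch-4 watch-5
```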
@@ -306,25 +319,21 @@ def changedetection_app(config=None, datastore_o=None):
             return "Access denied, bad token", 403

         from . import diff
-        limit_tag = request.args.get('tag')
+        limit_tag = request.args.get('tag', '').lower().strip()
+        # Be sure limit_tag is a uuid
+        for uuid, tag in datastore.data['settings']['application'].get('tags', {}).items():
+            if limit_tag == tag.get('title', '').lower().strip():
+                limit_tag = uuid
+
         # Sort by last_changed and add the uuid which is usually the key..
         sorted_watches = []

         # @todo needs a .itemsWithTag() or something - then we can use that in Jinaj2 and throw this away
         for uuid, watch in datastore.data['watching'].items():
-            if limit_tag != None:
-                # Support for comma separated list of tags.
-                for tag_in_watch in watch['tag'].split(','):
-                    tag_in_watch = tag_in_watch.strip()
-                    if tag_in_watch == limit_tag:
-                        watch['uuid'] = uuid
-                        sorted_watches.append(watch)
-
-            else:
-                watch['uuid'] = uuid
-                sorted_watches.append(watch)
+            if limit_tag and not limit_tag in watch['tags']:
+                continue
+            watch['uuid'] = uuid
+            sorted_watches.append(watch)

         sorted_watches.sort(key=lambda x: x.last_changed, reverse=False)
@@ -348,11 +357,11 @@ def changedetection_app(config=None, datastore_o=None):

             # Include a link to the diff page, they will have to login here to see if password protection is enabled.
             # Description is the page you watch, link takes you to the diff JS UI page
-            base_url = datastore.data['settings']['application']['base_url']
-            if base_url == '':
-                base_url = "<base-url-env-var-not-set>"
+            # Dict val base_url will get overriden with the env var if it is set.
+            ext_base_url = datastore.data['settings']['application'].get('active_base_url')

-            diff_link = {'href': "{}{}".format(base_url, url_for('diff_history_page', uuid=watch['uuid']))}
+            # Because we are called via whatever web server, flask should figure out the right path (
+            diff_link = {'href': url_for('diff_history_page', uuid=watch['uuid'], _external=True)}

             fe.link(link=diff_link)
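Building the RSS item link with `url_for(..., _external=True)` lets Flask derive the absolute URL from the incoming request (or `SERVER_NAME`) instead of a separately configured base URL. A small sketch of that behaviour; only the `diff_history_page` endpoint name comes from the diff above, the route path and host are made up for illustration.

```python
from flask import Flask, url_for

app = Flask(__name__)

@app.route("/diff/<uuid>")  # hypothetical URL rule, just so the endpoint exists
def diff_history_page(uuid):
    return f"diff for {uuid}"

# A test request context supplies the scheme/host that _external=True uses.
with app.test_request_context(base_url="https://changedet.example.com"):
    print(url_for('diff_history_page', uuid='abc-123'))
    # -> /diff/abc-123
    print(url_for('diff_history_page', uuid='abc-123', _external=True))
    # -> https://changedet.example.com/diff/abc-123
```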
@@ -381,9 +390,17 @@ def changedetection_app(config=None, datastore_o=None):
     @app.route("/", methods=['GET'])
     @login_optionally_required
     def index():
+        global datastore
         from changedetectionio import forms

-        limit_tag = request.args.get('tag')
+        limit_tag = request.args.get('tag', '').lower().strip()
+
+        # Be sure limit_tag is a uuid
+        for uuid, tag in datastore.data['settings']['application'].get('tags', {}).items():
+            if limit_tag == tag.get('title', '').lower().strip():
+                limit_tag = uuid
+

         # Redirect for the old rss path which used the /?rss=true
         if request.args.get('rss'):
             return redirect(url_for('rss', tag=limit_tag))
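Both the RSS and index views now accept `?tag=<title>` and translate the title to the tag's UUID key before filtering, because watches reference tags by UUID. A rough sketch of that lookup over a plain dict shaped like `settings['application']['tags']` (the sample UUIDs and titles are invented):

```python
# Shaped like datastore.data['settings']['application']['tags']:
# keys are tag UUIDs, values carry the human-readable title.
tags = {
    "0a1b2c3d-0000-0000-0000-000000000001": {"title": "Price drops"},
    "0a1b2c3d-0000-0000-0000-000000000002": {"title": "Restock"},
}

def resolve_tag(limit_tag):
    """Map a tag title from the query string to its UUID, or return it unchanged."""
    limit_tag = (limit_tag or '').lower().strip()
    for uuid, tag in tags.items():
        if limit_tag == tag.get('title', '').lower().strip():
            return uuid
    return limit_tag

watch = {"tags": ["0a1b2c3d-0000-0000-0000-000000000002"]}
limit_tag = resolve_tag("Restock")
print(limit_tag)                   # the Restock tag's UUID
print(limit_tag in watch["tags"])  # True -> this watch stays in the filtered list
```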
@@ -401,44 +418,60 @@ def changedetection_app(config=None, datastore_o=None):

         # Sort by last_changed and add the uuid which is usually the key..
         sorted_watches = []
+        search_q = request.args.get('q').strip().lower() if request.args.get('q') else False
         for uuid, watch in datastore.data['watching'].items():
-            if limit_tag != None:
-                # Support for comma separated list of tags.
-                if watch['tag'] is None:
-                    continue
-                for tag_in_watch in watch['tag'].split(','):
-                    tag_in_watch = tag_in_watch.strip()
-                    if tag_in_watch == limit_tag:
-                        watch['uuid'] = uuid
-                        sorted_watches.append(watch)
+            if limit_tag and not limit_tag in watch['tags']:
+                continue
+
+            if search_q:
+                if (watch.get('title') and search_q in watch.get('title').lower()) or search_q in watch.get('url', '').lower():
+                    sorted_watches.append(watch)
             else:
-                watch['uuid'] = uuid
                 sorted_watches.append(watch)

-        existing_tags = datastore.get_all_tags()
         form = forms.quickWatchForm(request.form)
+        page = request.args.get(get_page_parameter(), type=int, default=1)
+        total_count = len(sorted_watches)
+
+        pagination = Pagination(page=page,
+                                total=total_count,
+                                per_page=datastore.data['settings']['application'].get('pager_size', 50), css_framework="semantic")
+
         output = render_template(
             "watch-overview.html",
             # Don't link to hosting when we're on the hosting environment
             active_tag=limit_tag,
             app_rss_token=datastore.data['settings']['application']['rss_access_token'],
+            datastore=datastore,
             form=form,
             guid=datastore.data['app_guid'],
             has_proxies=datastore.proxy_list,
             has_unviewed=datastore.has_unviewed,
             hosted_sticky=os.getenv("SALTED_PASS", False) == False,
+            pagination=pagination,
             queued_uuids=[q_uuid.item['uuid'] for q_uuid in update_q.queue],
+            search_q=request.args.get('q','').strip(),
+            sort_attribute=request.args.get('sort') if request.args.get('sort') else request.cookies.get('sort'),
+            sort_order=request.args.get('order') if request.args.get('order') else request.cookies.get('order'),
             system_default_fetcher=datastore.data['settings']['application'].get('fetch_backend'),
-            tags=existing_tags,
+            tags=datastore.data['settings']['application'].get('tags'),
             watches=sorted_watches
         )

         if session.get('share-link'):
             del(session['share-link'])
-        return output
+
+        resp = make_response(output)
+
+        # The template can run on cookie or url query info
+        if request.args.get('sort'):
+            resp.set_cookie('sort', request.args.get('sort'))
+        if request.args.get('order'):
+            resp.set_cookie('order', request.args.get('order'))
+
+        return resp

         # AJAX endpoint for sending a test
@@ -463,11 +496,19 @@ def changedetection_app(config=None, datastore_o=None):

         try:
             n_object = {'watch_url': request.form['window_url'],
-                        'notification_urls': request.form['notification_urls'].splitlines(),
-                        'notification_title': request.form['notification_title'].strip(),
-                        'notification_body': request.form['notification_body'].strip(),
-                        'notification_format': request.form['notification_format'].strip()
+                        'notification_urls': request.form['notification_urls'].splitlines()
                         }
+
+            # Only use if present, if not set in n_object it should use the default system value
+            if 'notification_format' in request.form and request.form['notification_format'].strip():
+                n_object['notification_format'] = request.form.get('notification_format', '').strip()
+
+            if 'notification_title' in request.form and request.form['notification_title'].strip():
+                n_object['notification_title'] = request.form.get('notification_title', '').strip()
+
+            if 'notification_body' in request.form and request.form['notification_body'].strip():
+                n_object['notification_body'] = request.form.get('notification_body', '').strip()
+
             notification_q.put(n_object)
         except Exception as e:
             return make_response({'error': str(e)}, 400)
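The test-notification endpoint now copies title, body and format into the payload only when the form actually supplied them, so anything left blank falls back to the system defaults further down the notification pipeline. The same pattern in isolation, with a plain dict standing in for `request.form`:

```python
def build_notification_payload(form):
    # Always present: the page being watched and the notification target URLs.
    n_object = {
        'watch_url': form['window_url'],
        'notification_urls': form['notification_urls'].splitlines(),
    }
    # Optional overrides: only set when present and non-blank, otherwise the
    # system-wide default is used later on.
    for key in ('notification_format', 'notification_title', 'notification_body'):
        if form.get(key, '').strip():
            n_object[key] = form[key].strip()
    return n_object

form = {'window_url': 'https://example.com',
        'notification_urls': 'mailto://me@example.com',
        'notification_title': '',          # blank -> system default title
        'notification_format': 'HTML'}     # explicitly set -> kept as an override
print(build_notification_payload(form))
# {'watch_url': 'https://example.com', 'notification_urls': ['mailto://me@example.com'], 'notification_format': 'HTML'}
```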
@@ -557,9 +598,13 @@ def changedetection_app(config=None, datastore_o=None):

         # proxy_override set to the json/text list of the items
         form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
-                               data=default,
+                               data=default
                                )

+        # For the form widget tag uuid lookup
+        form.tags.datastore = datastore  # in _value
+
         form.fetch_backend.choices.append(("system", 'System settings default'))

         # form.browser_steps[0] can be assumed that we 'goto url' first
@@ -610,6 +655,16 @@ def changedetection_app(config=None, datastore_o=None):
|
|||||||
extra_update_obj['filter_text_replaced'] = True
|
extra_update_obj['filter_text_replaced'] = True
|
||||||
extra_update_obj['filter_text_removed'] = True
|
extra_update_obj['filter_text_removed'] = True
|
||||||
|
|
||||||
|
# Because wtforms doesn't support accessing other data in process_ , but we convert the CSV list of tags back to a list of UUIDs
|
||||||
|
tag_uuids = []
|
||||||
|
if form.data.get('tags'):
|
||||||
|
# Sometimes in testing this can be list, dont know why
|
||||||
|
if type(form.data.get('tags')) == list:
|
||||||
|
extra_update_obj['tags'] = form.data.get('tags')
|
||||||
|
else:
|
||||||
|
for t in form.data.get('tags').split(','):
|
||||||
|
tag_uuids.append(datastore.add_tag(name=t))
|
||||||
|
extra_update_obj['tags'] = tag_uuids
|
||||||
|
|
||||||
datastore.data['watching'][uuid].update(form.data)
|
datastore.data['watching'][uuid].update(form.data)
|
||||||
datastore.data['watching'][uuid].update(extra_update_obj)
|
datastore.data['watching'][uuid].update(extra_update_obj)
|
||||||
@@ -659,11 +714,11 @@ def changedetection_app(config=None, datastore_o=None):
|
|||||||
output = render_template("edit.html",
|
output = render_template("edit.html",
|
||||||
available_processors=processors.available_processors(),
|
available_processors=processors.available_processors(),
|
||||||
browser_steps_config=browser_step_ui_config,
|
browser_steps_config=browser_step_ui_config,
|
||||||
current_base_url=datastore.data['settings']['application']['base_url'],
|
|
||||||
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
|
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
|
||||||
form=form,
|
form=form,
|
||||||
has_default_notification_urls=True if len(datastore.data['settings']['application']['notification_urls']) else False,
|
has_default_notification_urls=True if len(datastore.data['settings']['application']['notification_urls']) else False,
|
||||||
has_empty_checktime=using_default_check_time,
|
has_empty_checktime=using_default_check_time,
|
||||||
|
has_extra_headers_file=len(datastore.get_all_headers_in_textfile_for_watch(uuid=uuid)) > 0,
|
||||||
is_html_webdriver=is_html_webdriver,
|
is_html_webdriver=is_html_webdriver,
|
||||||
jq_support=jq_support,
|
jq_support=jq_support,
|
||||||
playwright_enabled=os.getenv('PLAYWRIGHT_DRIVER_URL', False),
|
playwright_enabled=os.getenv('PLAYWRIGHT_DRIVER_URL', False),
|
||||||
@@ -748,7 +803,6 @@ def changedetection_app(config=None, datastore_o=None):
|
|||||||
|
|
||||||
output = render_template("settings.html",
|
output = render_template("settings.html",
|
||||||
form=form,
|
form=form,
|
||||||
current_base_url = datastore.data['settings']['application']['base_url'],
|
|
||||||
hide_remove_pass=os.getenv("SALTED_PASS", False),
|
hide_remove_pass=os.getenv("SALTED_PASS", False),
|
||||||
api_key=datastore.data['settings']['application'].get('api_access_token'),
|
api_key=datastore.data['settings']['application'].get('api_access_token'),
|
||||||
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
|
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
|
||||||
@@ -1060,8 +1114,8 @@ def changedetection_app(config=None, datastore_o=None):
|
|||||||
os.path.join(datastore_o.datastore_path, list_with_tags_file), "w"
|
os.path.join(datastore_o.datastore_path, list_with_tags_file), "w"
|
||||||
) as f:
|
) as f:
|
||||||
for uuid in datastore.data["watching"]:
|
for uuid in datastore.data["watching"]:
|
||||||
url = datastore.data["watching"][uuid]["url"]
|
url = datastore.data["watching"][uuid].get('url')
|
||||||
tag = datastore.data["watching"][uuid]["tag"]
|
tag = datastore.data["watching"][uuid].get('tags', {})
|
||||||
f.write("{} {}\r\n".format(url, tag))
|
f.write("{} {}\r\n".format(url, tag))
|
||||||
|
|
||||||
# Add it to the Zip
|
# Add it to the Zip
|
||||||
@@ -1149,7 +1203,7 @@ def changedetection_app(config=None, datastore_o=None):
|
|||||||
|
|
||||||
add_paused = request.form.get('edit_and_watch_submit_button') != None
|
add_paused = request.form.get('edit_and_watch_submit_button') != None
|
||||||
processor = request.form.get('processor', 'text_json_diff')
|
processor = request.form.get('processor', 'text_json_diff')
|
||||||
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip(), extras={'paused': add_paused, 'processor': processor})
|
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras={'paused': add_paused, 'processor': processor})
|
||||||
|
|
||||||
if new_uuid:
|
if new_uuid:
|
||||||
if add_paused:
|
if add_paused:
|
||||||
@@ -1214,12 +1268,14 @@ def changedetection_app(config=None, datastore_o=None):
|
|||||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
|
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
|
||||||
i = 1
|
i = 1
|
||||||
|
|
||||||
elif tag != None:
|
elif tag:
|
||||||
# Items that have this current tag
|
# Items that have this current tag
|
||||||
for watch_uuid, watch in datastore.data['watching'].items():
|
for watch_uuid, watch in datastore.data['watching'].items():
|
||||||
if (tag != None and tag in watch['tag']):
|
if tag in watch.get('tags', {}):
|
||||||
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
|
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
|
||||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False}))
|
update_q.put(
|
||||||
|
queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False})
|
||||||
|
)
|
||||||
i += 1
|
i += 1
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -1258,6 +1314,13 @@ def changedetection_app(config=None, datastore_o=None):
|
|||||||
datastore.data['watching'][uuid.strip()]['paused'] = False
|
datastore.data['watching'][uuid.strip()]['paused'] = False
|
||||||
flash("{} watches unpaused".format(len(uuids)))
|
flash("{} watches unpaused".format(len(uuids)))
|
||||||
|
|
||||||
|
elif (op == 'mark-viewed'):
|
||||||
|
for uuid in uuids:
|
||||||
|
uuid = uuid.strip()
|
||||||
|
if datastore.data['watching'].get(uuid):
|
||||||
|
datastore.set_last_viewed(uuid, int(time.time()))
|
||||||
|
flash("{} watches updated".format(len(uuids)))
|
||||||
|
|
||||||
elif (op == 'mute'):
|
elif (op == 'mute'):
|
||||||
for uuid in uuids:
|
for uuid in uuids:
|
||||||
uuid = uuid.strip()
|
uuid = uuid.strip()
|
||||||
@@ -1280,6 +1343,13 @@ def changedetection_app(config=None, datastore_o=None):
|
|||||||
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
|
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
|
||||||
flash("{} watches queued for rechecking".format(len(uuids)))
|
flash("{} watches queued for rechecking".format(len(uuids)))
|
||||||
|
|
||||||
|
elif (op == 'clear-history'):
|
||||||
|
for uuid in uuids:
|
||||||
|
uuid = uuid.strip()
|
||||||
|
if datastore.data['watching'].get(uuid):
|
||||||
|
datastore.clear_watch_history(uuid)
|
||||||
|
flash("{} watches cleared/reset.".format(len(uuids)))
|
||||||
|
|
||||||
elif (op == 'notification-default'):
|
elif (op == 'notification-default'):
|
||||||
from changedetectionio.notification import (
|
from changedetectionio.notification import (
|
||||||
default_notification_format_for_watch
|
default_notification_format_for_watch
|
||||||
@@ -1293,6 +1363,18 @@ def changedetection_app(config=None, datastore_o=None):
|
|||||||
datastore.data['watching'][uuid.strip()]['notification_format'] = default_notification_format_for_watch
|
datastore.data['watching'][uuid.strip()]['notification_format'] = default_notification_format_for_watch
|
||||||
flash("{} watches set to use default notification settings".format(len(uuids)))
|
flash("{} watches set to use default notification settings".format(len(uuids)))
|
||||||
|
|
||||||
|
elif (op == 'assign-tag'):
|
||||||
|
op_extradata = request.form.get('op_extradata', '').strip()
|
||||||
|
if op_extradata:
|
||||||
|
tag_uuid = datastore.add_tag(name=op_extradata)
|
||||||
|
if op_extradata and tag_uuid:
|
||||||
|
for uuid in uuids:
|
||||||
|
uuid = uuid.strip()
|
||||||
|
if datastore.data['watching'].get(uuid):
|
||||||
|
datastore.data['watching'][uuid]['tags'].append(tag_uuid)
|
||||||
|
|
||||||
|
flash("{} watches assigned tag".format(len(uuids)))
|
||||||
|
|
||||||
return redirect(url_for('index'))
|
return redirect(url_for('index'))
|
||||||
|
|
||||||
@app.route("/api/share-url", methods=['GET'])
|
@app.route("/api/share-url", methods=['GET'])
|
||||||
@@ -1302,7 +1384,6 @@ def changedetection_app(config=None, datastore_o=None):
|
|||||||
the share-link can be imported/added"""
|
the share-link can be imported/added"""
|
||||||
import requests
|
import requests
|
||||||
import json
|
import json
|
||||||
tag = request.args.get('tag')
|
|
||||||
uuid = request.args.get('uuid')
|
uuid = request.args.get('uuid')
|
||||||
|
|
||||||
# more for testing
|
# more for testing
|
||||||
@@ -1349,12 +1430,39 @@ def changedetection_app(config=None, datastore_o=None):
|
|||||||
# paste in etc
|
# paste in etc
|
||||||
return redirect(url_for('index'))
|
return redirect(url_for('index'))
|
||||||
|
|
||||||
|
@app.route("/highlight_submit_ignore_url", methods=['POST'])
|
||||||
|
def highlight_submit_ignore_url():
|
||||||
|
import re
|
||||||
|
mode = request.form.get('mode')
|
||||||
|
selection = request.form.get('selection')
|
||||||
|
|
||||||
|
uuid = request.args.get('uuid','')
|
||||||
|
if datastore.data["watching"].get(uuid):
|
||||||
|
if mode == 'exact':
|
||||||
|
for l in selection.splitlines():
|
||||||
|
datastore.data["watching"][uuid]['ignore_text'].append(l.strip())
|
||||||
|
elif mode == 'digit-regex':
|
||||||
|
for l in selection.splitlines():
|
||||||
|
# Replace any series of numbers with a regex
|
||||||
|
s = re.escape(l.strip())
|
||||||
|
s = re.sub(r'[0-9]+', r'\\d+', s)
|
||||||
|
datastore.data["watching"][uuid]['ignore_text'].append('/' + s + '/')
|
||||||
|
|
||||||
|
return f"<a href={url_for('preview_page', uuid=uuid)}>Click to preview</a>"
|
||||||
|
|
||||||
|
|
||||||
import changedetectionio.blueprint.browser_steps as browser_steps
|
import changedetectionio.blueprint.browser_steps as browser_steps
|
||||||
app.register_blueprint(browser_steps.construct_blueprint(datastore), url_prefix='/browser-steps')
|
app.register_blueprint(browser_steps.construct_blueprint(datastore), url_prefix='/browser-steps')
|
||||||
|
|
||||||
import changedetectionio.blueprint.price_data_follower as price_data_follower
|
import changedetectionio.blueprint.price_data_follower as price_data_follower
|
||||||
app.register_blueprint(price_data_follower.construct_blueprint(datastore, update_q), url_prefix='/price_data_follower')
|
app.register_blueprint(price_data_follower.construct_blueprint(datastore, update_q), url_prefix='/price_data_follower')
|
||||||
|
|
||||||
|
import changedetectionio.blueprint.tags as tags
|
||||||
|
app.register_blueprint(tags.construct_blueprint(datastore), url_prefix='/tags')
|
||||||
|
|
||||||
|
import changedetectionio.blueprint.check_proxies as check_proxies
|
||||||
|
app.register_blueprint(check_proxies.construct_blueprint(datastore=datastore), url_prefix='/check_proxy')
|
||||||
|
|
||||||
|
|
||||||
# @todo handle ctrl break
|
# @todo handle ctrl break
|
||||||
ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start()
|
ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start()
|
||||||
@@ -1394,6 +1502,7 @@ def check_for_new_version():
|
|||||||
# Check daily
|
# Check daily
|
||||||
app.config.exit.wait(86400)
|
app.config.exit.wait(86400)
|
||||||
|
|
||||||
|
|
||||||
def notification_runner():
|
def notification_runner():
|
||||||
global notification_debug_log
|
global notification_debug_log
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|||||||
@@ -1,3 +1,6 @@

+import os
+from distutils.util import strtobool

from flask_expects_json import expects_json
from changedetectionio import queuedWatchMetaData
from flask_restful import abort, Resource

@@ -33,7 +36,7 @@ class Watch(Resource):

@auth.check_token
def get(self, uuid):
"""
-@api {get} /api/v1/watch/:uuid Get a single watch data
+@api {get} /api/v1/watch/:uuid Single watch - get data, recheck, pause, mute.
@apiDescription Retrieve watch information and set muted/paused status
@apiExample {curl} Example usage:
curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -H"x-api-key:813031b16330fe25e3780cf0325daa45"

@@ -209,7 +212,9 @@ class CreateWatch(Resource):

json_data = request.get_json()
url = json_data['url'].strip()

-if not validators.url(json_data['url'].strip()):
+# If hosts that only contain alphanumerics are allowed ("localhost" for example)
+allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
+if not validators.url(url, simple_host=allow_simplehost):
return "Invalid or unsupported URL", 400

if json_data.get('proxy'):

@@ -218,9 +223,16 @@ class CreateWatch(Resource):

return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400

extras = copy.deepcopy(json_data)

+# Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)
+tags = None
+if extras.get('tag'):
+tags = extras.get('tag')
+del extras['tag']

del extras['url']

-new_uuid = self.datastore.add_watch(url=url, extras=extras)
+new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags)
if new_uuid:
self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
return {'uuid': new_uuid}, 201

@@ -259,13 +271,16 @@ class CreateWatch(Resource):

"""
list = {}

-tag_limit = request.args.get('tag', None)
+tag_limit = request.args.get('tag', '').lower()
-for k, watch in self.datastore.data['watching'].items():
-if tag_limit:
-if not tag_limit.lower() in watch.all_tags:
-continue

-list[k] = {'url': watch['url'],
+for uuid, watch in self.datastore.data['watching'].items():
+# Watch tags by name (replace the other calls?)
+tags = self.datastore.get_all_tags_for_watch(uuid=uuid)
+if tag_limit and not any(v.get('title').lower() == tag_limit for k, v in tags.items()):
+continue

+list[uuid] = {'url': watch['url'],
'title': watch['title'],
'last_checked': watch['last_checked'],
'last_changed': watch.last_changed,
@@ -27,58 +27,106 @@ import os

import logging
from changedetectionio.store import ChangeDetectionStore
from changedetectionio import login_optionally_required
-browsersteps_live_ui_o = {}
-browsersteps_playwright_browser_interface = None
-browsersteps_playwright_browser_interface_browser = None
-browsersteps_playwright_browser_interface_context = None
-browsersteps_playwright_browser_interface_end_time = None
-browsersteps_playwright_browser_interface_start_time = None

-def cleanup_playwright_session():
+browsersteps_sessions = {}
+io_interface_context = None

-global browsersteps_live_ui_o
-global browsersteps_playwright_browser_interface
-global browsersteps_playwright_browser_interface_browser
-global browsersteps_playwright_browser_interface_context
-global browsersteps_playwright_browser_interface_end_time
-global browsersteps_playwright_browser_interface_start_time

-browsersteps_live_ui_o = {}
-browsersteps_playwright_browser_interface = None
-browsersteps_playwright_browser_interface_browser = None
-browsersteps_playwright_browser_interface_end_time = None
-browsersteps_playwright_browser_interface_start_time = None

-print("Cleaning up old playwright session because time was up, calling .goodbye()")
-try:
-browsersteps_playwright_browser_interface_context.goodbye()
-except Exception as e:
-print ("Got exception in shutdown, probably OK")
-print (str(e))

-browsersteps_playwright_browser_interface_context = None

-print ("Cleaning up old playwright session because time was up - done")

def construct_blueprint(datastore: ChangeDetectionStore):

browser_steps_blueprint = Blueprint('browser_steps', __name__, template_folder="templates")

+def start_browsersteps_session(watch_uuid):
+from . import nonContext
+from . import browser_steps
+import time
+global browsersteps_sessions
+global io_interface_context

+# We keep the playwright session open for many minutes
+seconds_keepalive = int(os.getenv('BROWSERSTEPS_MINUTES_KEEPALIVE', 10)) * 60

+browsersteps_start_session = {'start_time': time.time()}

+# You can only have one of these running
+# This should be very fine to leave running for the life of the application
+# @idea - Make it global so the pool of watch fetchers can use it also
+if not io_interface_context:
+io_interface_context = nonContext.c_sync_playwright()
+# Start the Playwright context, which is actually a nodejs sub-process and communicates over STDIN/STDOUT pipes
+io_interface_context = io_interface_context.start()

+# keep it alive for 10 seconds more than we advertise, sometimes it helps to keep it shutting down cleanly
+keepalive = "&timeout={}".format(((seconds_keepalive + 3) * 1000))
+try:
+browsersteps_start_session['browser'] = io_interface_context.chromium.connect_over_cdp(
+os.getenv('PLAYWRIGHT_DRIVER_URL', '') + keepalive)
+except Exception as e:
+if 'ECONNREFUSED' in str(e):
+return make_response('Unable to start the Playwright Browser session, is it running?', 401)
+else:
+return make_response(str(e), 401)

+proxy_id = datastore.get_preferred_proxy_for_watch(uuid=watch_uuid)
+proxy = None
+if proxy_id:
+proxy_url = datastore.proxy_list.get(proxy_id).get('url')
+if proxy_url:

+# Playwright needs separate username and password values
+from urllib.parse import urlparse
+parsed = urlparse(proxy_url)
+proxy = {'server': proxy_url}

+if parsed.username:
+proxy['username'] = parsed.username

+if parsed.password:
+proxy['password'] = parsed.password

+print("Browser Steps: UUID {} selected proxy {}".format(watch_uuid, proxy_url))

+# Tell Playwright to connect to Chrome and setup a new session via our stepper interface
+browsersteps_start_session['browserstepper'] = browser_steps.browsersteps_live_ui(
+playwright_browser=browsersteps_start_session['browser'],
+proxy=proxy)

+# For test
+#browsersteps_start_session['browserstepper'].action_goto_url(value="http://example.com?time="+str(time.time()))

+return browsersteps_start_session

@login_optionally_required
-@browser_steps_blueprint.route("/browsersteps_update", methods=['GET', 'POST'])
+@browser_steps_blueprint.route("/browsersteps_start_session", methods=['GET'])
+def browsersteps_start_session():
+# A new session was requested, return sessionID

+import uuid
+global browsersteps_sessions

+browsersteps_session_id = str(uuid.uuid4())
+watch_uuid = request.args.get('uuid')

+if not watch_uuid:
+return make_response('No Watch UUID specified', 500)

+print("Starting connection with playwright")
+logging.debug("browser_steps.py connecting")
+browsersteps_sessions[browsersteps_session_id] = start_browsersteps_session(watch_uuid)
+print("Starting connection with playwright - done")
+return {'browsersteps_session_id': browsersteps_session_id}

+# A request for an action was received
+@login_optionally_required
+@browser_steps_blueprint.route("/browsersteps_update", methods=['POST'])
def browsersteps_ui_update():
import base64
import playwright._impl._api_types
-import time
+global browsersteps_sessions

from changedetectionio.blueprint.browser_steps import browser_steps

-global browsersteps_live_ui_o, browsersteps_playwright_browser_interface_end_time
-global browsersteps_playwright_browser_interface_browser
-global browsersteps_playwright_browser_interface
-global browsersteps_playwright_browser_interface_start_time

-step_n = None
remaining =0
uuid = request.args.get('uuid')

@@ -87,13 +135,9 @@ def construct_blueprint(datastore: ChangeDetectionStore):

if not browsersteps_session_id:
return make_response('No browsersteps_session_id specified', 500)

-# Because we don't "really" run in a context manager ( we make the playwright interface global/long-living )
-# We need to manage the shutdown when the time is up
-if browsersteps_playwright_browser_interface_end_time:
-remaining = browsersteps_playwright_browser_interface_end_time-time.time()
-if browsersteps_playwright_browser_interface_end_time and remaining <= 0:
-cleanup_playwright_session()
-return make_response('Browser session expired, please reload the Browser Steps interface', 401)
+if not browsersteps_sessions.get(browsersteps_session_id):
+return make_response('No session exists under that ID', 500)

# Actions - step/apply/etc, do the thing and return state
if request.method == 'POST':

@@ -112,12 +156,7 @@ def construct_blueprint(datastore: ChangeDetectionStore):

# @todo try.. accept.. nice errors not popups..
try:

-this_session = browsersteps_live_ui_o.get(browsersteps_session_id)
-if not this_session:
-print("Browser exited")
-return make_response('Browser session ran out of time :( Please reload this page.', 401)

-this_session.call_action(action_name=step_operation,
+browsersteps_sessions[browsersteps_session_id]['browserstepper'].call_action(action_name=step_operation,
selector=step_selector,
optional_value=step_optional_value)

@@ -129,99 +168,43 @@ def construct_blueprint(datastore: ChangeDetectionStore):

# Get visual selector ready/update its data (also use the current filter info from the page?)
# When the last 'apply' button was pressed
# @todo this adds overhead because the xpath selection is happening twice
-u = this_session.page.url
+u = browsersteps_sessions[browsersteps_session_id]['browserstepper'].page.url
if is_last_step and u:
-(screenshot, xpath_data) = this_session.request_visualselector_data()
+(screenshot, xpath_data) = browsersteps_sessions[browsersteps_session_id]['browserstepper'].request_visualselector_data()
datastore.save_screenshot(watch_uuid=uuid, screenshot=screenshot)
datastore.save_xpath_data(watch_uuid=uuid, data=xpath_data)

-# Setup interface
-if request.method == 'GET':
-if not browsersteps_playwright_browser_interface:
-print("Starting connection with playwright")
-logging.debug("browser_steps.py connecting")
-global browsersteps_playwright_browser_interface_context
-from . import nonContext
-browsersteps_playwright_browser_interface_context = nonContext.c_sync_playwright()
-browsersteps_playwright_browser_interface = browsersteps_playwright_browser_interface_context.start()
-time.sleep(1)
-# At 20 minutes, some other variable is closing it
-# @todo find out what it is and set it
-seconds_keepalive = int(os.getenv('BROWSERSTEPS_MINUTES_KEEPALIVE', 10)) * 60
-# keep it alive for 10 seconds more than we advertise, sometimes it helps to keep it shutting down cleanly
-keepalive = "&timeout={}".format(((seconds_keepalive+3) * 1000))
-try:
-browsersteps_playwright_browser_interface_browser = browsersteps_playwright_browser_interface.chromium.connect_over_cdp(
-os.getenv('PLAYWRIGHT_DRIVER_URL', '') + keepalive)
-except Exception as e:
-if 'ECONNREFUSED' in str(e):
-return make_response('Unable to start the Playwright session properly, is it running?', 401)
-browsersteps_playwright_browser_interface_end_time = time.time() + (seconds_keepalive-3)
-print("Starting connection with playwright - done")

-if not browsersteps_live_ui_o.get(browsersteps_session_id):
-# Boot up a new session
-proxy_id = datastore.get_preferred_proxy_for_watch(uuid=uuid)
-proxy = None
-if proxy_id:
-proxy_url = datastore.proxy_list.get(proxy_id).get('url')
-if proxy_url:
-proxy = {'server': proxy_url}
-print("Browser Steps: UUID {} Using proxy {}".format(uuid, proxy_url))
-# Begin the new "Playwright Context" that re-uses the playwright interface
-# Each session is a "Playwright Context" as a list, that uses the playwright interface
-browsersteps_live_ui_o[browsersteps_session_id] = browser_steps.browsersteps_live_ui(
-playwright_browser=browsersteps_playwright_browser_interface_browser,
-proxy=proxy)
-this_session = browsersteps_live_ui_o[browsersteps_session_id]

-if not this_session.page:
-cleanup_playwright_session()
-return make_response('Browser session ran out of time :( Please reload this page.', 401)

-response = None

-if request.method == 'POST':
-# Screenshots and other info only needed on requesting a step (POST)
-try:
-state = this_session.get_current_state()
-except playwright._impl._api_types.Error as e:
-return make_response("Browser session ran out of time :( Please reload this page."+str(e), 401)

-# Use send_file() which is way faster than read/write loop on bytes
-import json
-from tempfile import mkstemp
-from flask import send_file
-tmp_fd, tmp_file = mkstemp(text=True, suffix=".json", prefix="changedetectionio-")

-output = json.dumps({'screenshot': "data:image/jpeg;base64,{}".format(
-base64.b64encode(state[0]).decode('ascii')),
-'xpath_data': state[1],
-'session_age_start': this_session.age_start,
-'browser_time_remaining': round(remaining)
-})

-with os.fdopen(tmp_fd, 'w') as f:
-f.write(output)

-response = make_response(send_file(path_or_file=tmp_file,
-mimetype='application/json; charset=UTF-8',
-etag=True))
-# No longer needed
-os.unlink(tmp_file)

-elif request.method == 'GET':
-# Just enough to get the session rolling, it will call for goto-site via POST next
-response = make_response({
-'session_age_start': this_session.age_start,
-'browser_time_remaining': round(remaining)
-})

+# if not this_session.page:
+# cleanup_playwright_session()
+# return make_response('Browser session ran out of time :( Please reload this page.', 401)

+# Screenshots and other info only needed on requesting a step (POST)
+try:
+state = browsersteps_sessions[browsersteps_session_id]['browserstepper'].get_current_state()
+except playwright._impl._api_types.Error as e:
+return make_response("Browser session ran out of time :( Please reload this page."+str(e), 401)

+# Use send_file() which is way faster than read/write loop on bytes
+import json
+from tempfile import mkstemp
+from flask import send_file
+tmp_fd, tmp_file = mkstemp(text=True, suffix=".json", prefix="changedetectionio-")

+output = json.dumps({'screenshot': "data:image/jpeg;base64,{}".format(
+base64.b64encode(state[0]).decode('ascii')),
+'xpath_data': state[1],
+'session_age_start': browsersteps_sessions[browsersteps_session_id]['browserstepper'].age_start,
+'browser_time_remaining': round(remaining)
+})

+with os.fdopen(tmp_fd, 'w') as f:
+f.write(output)

+response = make_response(send_file(path_or_file=tmp_file,
+mimetype='application/json; charset=UTF-8',
+etag=True))
+# No longer needed
+os.unlink(tmp_file)

return response
@@ -71,10 +71,10 @@ class steppable_browser_interface():

optional_value = str(jinja2_env.from_string(optional_value).render())

action_handler(selector, optional_value)
-self.page.wait_for_timeout(3 * 1000)
+self.page.wait_for_timeout(1.5 * 1000)
print("Call action done in", time.time() - now)

-def action_goto_url(self, selector, value):
+def action_goto_url(self, selector=None, value=None):
# self.page.set_viewport_size({"width": 1280, "height": 5000})
now = time.time()
response = self.page.goto(value, timeout=0, wait_until='commit')

@@ -105,7 +105,8 @@ class steppable_browser_interface():

print("Clicking element")
if not len(selector.strip()):
return
-self.page.click(selector, timeout=10 * 1000, delay=randint(200, 500))
+self.page.click(selector=selector, timeout=30 * 1000, delay=randint(200, 500))

def action_click_element_if_exists(self, selector, value):
import playwright._impl._api_types as _api_types

@@ -132,18 +133,18 @@ class steppable_browser_interface():

self.page.wait_for_timeout(1000)

def action_wait_for_seconds(self, selector, value):
-self.page.wait_for_timeout(int(value) * 1000)
+self.page.wait_for_timeout(float(value.strip()) * 1000)

def action_wait_for_text(self, selector, value):
import json
v = json.dumps(value)
-self.page.wait_for_function(f'document.querySelector("body").innerText.includes({v});', timeout=30000)
+self.page.wait_for_function(f'document.querySelector("body").innerText.includes({v});', timeout=90000)

def action_wait_for_text_in_element(self, selector, value):
import json
s = json.dumps(selector)
v = json.dumps(value)
-self.page.wait_for_function(f'document.querySelector({s}).innerText.includes({v});', timeout=30000)
+self.page.wait_for_function(f'document.querySelector({s}).innerText.includes({v});', timeout=90000)

# @todo - in the future make some popout interface to capture what needs to be set
# https://playwright.dev/python/docs/api/class-keyboard
changedetectionio/blueprint/check_proxies/__init__.py (Normal file, 118 additions)
@@ -0,0 +1,118 @@

+from concurrent.futures import ThreadPoolExecutor

+from functools import wraps

+from flask import Blueprint
+from flask_login import login_required

+from changedetectionio.processors import text_json_diff
+from changedetectionio.store import ChangeDetectionStore

+STATUS_CHECKING = 0
+STATUS_FAILED = 1
+STATUS_OK = 2
+THREADPOOL_MAX_WORKERS = 3
+_DEFAULT_POOL = ThreadPoolExecutor(max_workers=THREADPOOL_MAX_WORKERS)

+# Maybe use fetch-time if its >5 to show some expected load time?
+def threadpool(f, executor=None):
+@wraps(f)
+def wrap(*args, **kwargs):
+return (executor or _DEFAULT_POOL).submit(f, *args, **kwargs)

+return wrap

+def construct_blueprint(datastore: ChangeDetectionStore):
+check_proxies_blueprint = Blueprint('check_proxies', __name__)
+checks_in_progress = {}

+@threadpool
+def long_task(uuid, preferred_proxy):
+import time
+from changedetectionio import content_fetcher

+status = {'status': '', 'length': 0, 'text': ''}
+from jinja2 import Environment, BaseLoader

+contents = ''
+now = time.time()
+try:
+update_handler = text_json_diff.perform_site_check(datastore=datastore)
+changed_detected, update_obj, contents = update_handler.run(uuid, preferred_proxy=preferred_proxy, skip_when_checksum_same=False)
+# title, size is len contents not len xfer
+except content_fetcher.Non200ErrorCodeReceived as e:
+if e.status_code == 404:
+status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but 404 (page not found)"})
+elif e.status_code == 403 or e.status_code == 401:
+status.update({'status': 'ERROR', 'length': len(contents), 'text': f"{e.status_code} - Access denied"})
+else:
+status.update({'status': 'ERROR', 'length': len(contents), 'text': f"Status code: {e.status_code}"})
+except text_json_diff.FilterNotFoundInResponse:
+status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but CSS/xPath filter not found (page changed layout?)"})
+except content_fetcher.EmptyReply as e:
+if e.status_code == 403 or e.status_code == 401:
+status.update({'status': 'ERROR OTHER', 'length': len(contents), 'text': f"Got empty reply with code {e.status_code} - Access denied"})
+else:
+status.update({'status': 'ERROR OTHER', 'length': len(contents) if contents else 0, 'text': f"Empty reply with code {e.status_code}, needs chrome?"})
+except content_fetcher.ReplyWithContentButNoText as e:
+txt = f"Got reply but with no content - Status code {e.status_code} - It's possible that the filters were found, but contained no usable text (or contained only an image)."
+status.update({'status': 'ERROR', 'text': txt})
+except Exception as e:
+status.update({'status': 'ERROR OTHER', 'length': len(contents) if contents else 0, 'text': 'Error: '+type(e).__name__+str(e)})
+else:
+status.update({'status': 'OK', 'length': len(contents), 'text': ''})

+if status.get('text'):
+status['text'] = Environment(loader=BaseLoader()).from_string('{{text|e}}').render({'text': status['text']})

+status['time'] = "{:.2f}s".format(time.time() - now)

+return status

+def _recalc_check_status(uuid):

+results = {}
+for k, v in checks_in_progress.get(uuid, {}).items():
+try:
+r_1 = v.result(timeout=0.05)
+except Exception as e:
+# If timeout error?
+results[k] = {'status': 'RUNNING'}

+else:
+results[k] = r_1

+return results

+@login_required
+@check_proxies_blueprint.route("/<string:uuid>/status", methods=['GET'])
+def get_recheck_status(uuid):
+results = _recalc_check_status(uuid=uuid)
+return results

+@login_required
+@check_proxies_blueprint.route("/<string:uuid>/start", methods=['GET'])
+def start_check(uuid):

+if not datastore.proxy_list:
+return

+if checks_in_progress.get(uuid):
+state = _recalc_check_status(uuid=uuid)
+for proxy_key, v in state.items():
+if v.get('status') == 'RUNNING':
+return state
+else:
+checks_in_progress[uuid] = {}

+for k, v in datastore.proxy_list.items():
+if not checks_in_progress[uuid].get(k):
+checks_in_progress[uuid][k] = long_task(uuid=uuid, preferred_proxy=k)

+results = _recalc_check_status(uuid=uuid)
+return results

+return check_proxies_blueprint
changedetectionio/blueprint/tags/README.md (Normal file, 9 additions)
@@ -0,0 +1,9 @@

+# Groups tags

+## How it works

+Watch has a list() of tag UUID's, which relate to a config under application.settings.tags

+The 'tag' is actually a watch, because they basically will eventually share 90% of the same config.

+So a tag is like an abstract of a watch
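To make the data model described in that README concrete, here is a minimal sketch of a watch holding a list of tag UUIDs that resolve to entries stored under application.settings.tags. The dictionary shapes and helper names below are assumptions for illustration only, not the project's actual API; the real logic lives in the store and blueprint code shown in this diff.

# Minimal illustrative sketch (assumed shapes, not the project's real API) of the
# "watch -> list of tag UUIDs -> tag config" relationship described above.
import uuid

tags = {}      # stands in for application.settings.tags: tag UUID -> tag config
watching = {}  # watch UUID -> watch config, each carrying a 'tags' list of tag UUIDs

def add_tag(title):
    # Create a tag entry; like a watch, it can carry filter/notification settings
    tag_uuid = str(uuid.uuid4())
    tags[tag_uuid] = {'title': title, 'notification_muted': False}
    return tag_uuid

def tags_for_watch(watch_uuid):
    # Resolve a watch's tag UUIDs back to their config entries
    return {t: tags[t] for t in watching.get(watch_uuid, {}).get('tags', []) if t in tags}

# Usage: create a tag, link it to a watch, then look the config back up by UUID
shoes = add_tag('shoes')
watching['watch-1'] = {'url': 'https://example.com', 'tags': [shoes]}
print(tags_for_watch('watch-1'))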
changedetectionio/blueprint/tags/__init__.py (Normal file, 141 additions)
@@ -0,0 +1,141 @@

+from flask import Blueprint, request, make_response, render_template, flash, url_for, redirect
+from changedetectionio.store import ChangeDetectionStore
+from changedetectionio import login_optionally_required

+def construct_blueprint(datastore: ChangeDetectionStore):
+tags_blueprint = Blueprint('tags', __name__, template_folder="templates")

+@tags_blueprint.route("/list", methods=['GET'])
+@login_optionally_required
+def tags_overview_page():
+from .form import SingleTag
+add_form = SingleTag(request.form)
+output = render_template("groups-overview.html",
+form=add_form,
+available_tags=datastore.data['settings']['application'].get('tags', {}),
+)

+return output

+@tags_blueprint.route("/add", methods=['POST'])
+@login_optionally_required
+def form_tag_add():
+from .form import SingleTag
+add_form = SingleTag(request.form)

+if not add_form.validate():
+for widget, l in add_form.errors.items():
+flash(','.join(l), 'error')
+return redirect(url_for('tags.tags_overview_page'))

+title = request.form.get('name').strip()

+if datastore.tag_exists_by_name(title):
+flash(f'The tag "{title}" already exists', "error")
+return redirect(url_for('tags.tags_overview_page'))

+datastore.add_tag(title)
+flash("Tag added")

+return redirect(url_for('tags.tags_overview_page'))

+@tags_blueprint.route("/mute/<string:uuid>", methods=['GET'])
+@login_optionally_required
+def mute(uuid):
+if datastore.data['settings']['application']['tags'].get(uuid):
+datastore.data['settings']['application']['tags'][uuid]['notification_muted'] = not datastore.data['settings']['application']['tags'][uuid]['notification_muted']
+return redirect(url_for('tags.tags_overview_page'))

+@tags_blueprint.route("/delete/<string:uuid>", methods=['GET'])
+@login_optionally_required
+def delete(uuid):
+removed = 0
+# Delete the tag, and any tag reference
+if datastore.data['settings']['application']['tags'].get(uuid):
+del datastore.data['settings']['application']['tags'][uuid]

+for watch_uuid, watch in datastore.data['watching'].items():
+if watch.get('tags') and uuid in watch['tags']:
+removed += 1
+watch['tags'].remove(uuid)

+flash(f"Tag deleted and removed from {removed} watches")
+return redirect(url_for('tags.tags_overview_page'))

+@tags_blueprint.route("/unlink/<string:uuid>", methods=['GET'])
+@login_optionally_required
+def unlink(uuid):
+unlinked = 0
+for watch_uuid, watch in datastore.data['watching'].items():
+if watch.get('tags') and uuid in watch['tags']:
+unlinked += 1
+watch['tags'].remove(uuid)

+flash(f"Tag unlinked removed from {unlinked} watches")
+return redirect(url_for('tags.tags_overview_page'))

+@tags_blueprint.route("/delete_all", methods=['GET'])
+@login_optionally_required
+def delete_all():
+for watch_uuid, watch in datastore.data['watching'].items():
+watch['tags'] = []
+datastore.data['settings']['application']['tags'] = {}

+flash(f"All tags deleted")
+return redirect(url_for('tags.tags_overview_page'))

+@tags_blueprint.route("/edit/<string:uuid>", methods=['GET'])
+@login_optionally_required
+def form_tag_edit(uuid):
+from changedetectionio import forms

+if uuid == 'first':
+uuid = list(datastore.data['settings']['application']['tags'].keys()).pop()

+default = datastore.data['settings']['application']['tags'].get(uuid)

+form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
+data=default,
+)
+form.datastore=datastore # needed?

+output = render_template("edit-tag.html",
+data=default,
+form=form,
+settings_application=datastore.data['settings']['application'],
+)

+return output

+@tags_blueprint.route("/edit/<string:uuid>", methods=['POST'])
+@login_optionally_required
+def form_tag_edit_submit(uuid):
+from changedetectionio import forms
+if uuid == 'first':
+uuid = list(datastore.data['settings']['application']['tags'].keys()).pop()

+default = datastore.data['settings']['application']['tags'].get(uuid)

+form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
+data=default,
+)
+# @todo subclass form so validation works
+#if not form.validate():
+# for widget, l in form.errors.items():
+# flash(','.join(l), 'error')
+# return redirect(url_for('tags.form_tag_edit_submit', uuid=uuid))

+datastore.data['settings']['application']['tags'][uuid].update(form.data)
+datastore.needs_write_urgent = True
+flash("Updated")

+return redirect(url_for('tags.tags_overview_page'))

+@tags_blueprint.route("/delete/<string:uuid>", methods=['GET'])
+def form_tag_delete(uuid):
+return redirect(url_for('tags.tags_overview_page'))
+return tags_blueprint
changedetectionio/blueprint/tags/form.py (Normal file, 22 additions)
@@ -0,0 +1,22 @@

+from wtforms import (
+BooleanField,
+Form,
+IntegerField,
+RadioField,
+SelectField,
+StringField,
+SubmitField,
+TextAreaField,
+validators,
+)

+class SingleTag(Form):

+name = StringField('Tag name', [validators.InputRequired()], render_kw={"placeholder": "Name"})
+save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
changedetectionio/blueprint/tags/templates/edit-tag.html (Normal file, 135 additions)
@@ -0,0 +1,135 @@

+{% extends 'base.html' %}
+{% block content %}
+{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
+{% from '_common_fields.jinja' import render_common_settings_form %}
+<script>
+const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
+</script>

+<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
+<script>

+/*{% if emailprefix %}*/
+/*const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');*/
+/*{% endif %}*/

+</script>

+<script src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script>
+<!--<script src="{{url_for('static_content', group='js', filename='limit.js')}}" defer></script>-->
+<script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>

+<div class="edit-form monospaced-textarea">

+<div class="tabs collapsable">
+<ul>
+<li class="tab" id=""><a href="#general">General</a></li>
+<li class="tab"><a href="#filters-and-triggers">Filters & Triggers</a></li>
+<li class="tab"><a href="#notifications">Notifications</a></li>
+</ul>
+</div>

+<div class="box-wrap inner">
+<form class="pure-form pure-form-stacked"
+action="{{ url_for('tags.form_tag_edit', uuid=data.uuid) }}" method="POST">
+<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">

+<div class="tab-pane-inner" id="general">
+<fieldset>
+<div class="pure-control-group">
+{{ render_field(form.title, placeholder="https://...", required=true, class="m-d") }}
+</div>
+</fieldset>
+</div>

+<div class="tab-pane-inner" id="filters-and-triggers">
+<div class="pure-control-group">
+{% set field = render_field(form.include_filters,
+rows=5,
+placeholder="#example
+xpath://body/div/span[contains(@class, 'example-class')]",
+class="m-d")
+%}
+{{ field }}
+{% if '/text()' in field %}
+<span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the <element> contains <![CDATA[]]></strong></span><br>
+{% endif %}
+<span class="pure-form-message-inline">One rule per line, <i>any</i> rules that matches will be used.<br>

+<ul>
+<li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
+<li>JSON - Limit text to this JSON rule, using either <a href="https://pypi.org/project/jsonpath-ng/" target="new">JSONPath</a> or <a href="https://stedolan.github.io/jq/" target="new">jq</a> (if installed).
+<ul>
+<li>JSONPath: Prefix with <code>json:</code>, use <code>json:$</code> to force re-formatting if required, <a href="https://jsonpath.com/" target="new">test your JSONPath here</a>.</li>
+{% if jq_support %}
+<li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>.</li>
+{% else %}
+<li>jq support not installed</li>
+{% endif %}
+</ul>
+</li>
+<li>XPath - Limit text to this XPath rule, simply start with a forward-slash,
+<ul>
+<li>Example: <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
+href="http://xpather.com/" target="new">test your XPath here</a></li>
+<li>Example: Get all titles from an RSS feed <code>//title/text()</code></li>
+</ul>
+</li>
+</ul>
+Please be sure that you thoroughly understand how to write CSS, JSONPath, XPath{% if jq_support %}, or jq selector{%endif%} rules before filing an issue on GitHub! <a
+href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br>
+</span>
+</div>
+<fieldset class="pure-control-group">
+{{ render_field(form.subtractive_selectors, rows=5, placeholder="header
+footer
+nav
+.stockticker") }}
+<span class="pure-form-message-inline">
+<ul>
+<li> Remove HTML element(s) by CSS selector before text conversion. </li>
+<li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li>
+</ul>
+</span>
+</fieldset>

+</div>

+<div class="tab-pane-inner" id="notifications">
+<fieldset>
+<div class="pure-control-group inline-radio">
+{{ render_checkbox_field(form.notification_muted) }}
+</div>
+{% if is_html_webdriver %}
+<div class="pure-control-group inline-radio">
+{{ render_checkbox_field(form.notification_screenshot) }}
+<span class="pure-form-message-inline">
+<strong>Use with caution!</strong> This will easily fill up your email storage quota or flood other storages.
+</span>
+</div>
+{% endif %}
+<div class="field-group" id="notification-field-group">
+{% if has_default_notification_urls %}
+<div class="inline-warning">
+<img class="inline-warning-icon" src="{{url_for('static_content', group='images', filename='notice.svg')}}" alt="Look out!" title="Lookout!" >
+There are <a href="{{ url_for('settings_page')}}#notifications">system-wide notification URLs enabled</a>, this form will override notification settings for this watch only ‐ an empty Notification URL list here will still send notifications.
+</div>
+{% endif %}
+<a href="#notifications" id="notification-setting-reset-to-default" class="pure-button button-xsmall" style="right: 20px; top: 20px; position: absolute; background-color: #5f42dd; border-radius: 4px; font-size: 70%; color: #fff">Use system defaults</a>

+{{ render_common_settings_form(form, emailprefix, settings_application) }}
+</div>
+</fieldset>
+</div>

+<div id="actions">
+<div class="pure-control-group">
+{{ render_button(form.save_button) }}
+</div>
+</div>
+</form>
+</div>
+</div>

+{% endblock %}
@@ -0,0 +1,60 @@

+{% extends 'base.html' %}
+{% block content %}
+{% from '_helpers.jinja' import render_simple_field, render_field %}
+<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>

+<div class="box">
+<form class="pure-form" action="{{ url_for('tags.form_tag_add') }}" method="POST" id="new-watch-form">
+<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
+<fieldset>
+<legend>Add a new organisational tag</legend>
+<div id="watch-add-wrapper-zone">
+<div>
+{{ render_simple_field(form.name, placeholder="watch label / tag") }}
+</div>
+<div>
+{{ render_simple_field(form.save_button, title="Save" ) }}
+</div>
+</div>
+<br>
+<div style="color: #fff;">Groups allows you to manage filters and notifications for multiple watches under a single organisational tag.</div>
+</fieldset>
+</form>
+<!-- @todo maybe some overview matrix, 'tick' with which has notification, filter rules etc -->
+<div id="watch-table-wrapper">

+<table class="pure-table pure-table-striped watch-table group-overview-table">
+<thead>
+<tr>
+<th></th>
+<th>Tag / Label name</th>
+<th></th>
+</tr>
+</thead>
+<tbody>
+<!--
+@Todo - connect Last checked, Last Changed, Number of Watches etc
+--->
+{% if not available_tags|length %}
+<tr>
+<td colspan="3">No website organisational tags/groups configured</td>
+</tr>
+{% endif %}
+{% for uuid, tag in available_tags.items() %}
+<tr id="{{ uuid }}" class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }}">
+<td class="watch-controls">
+<a class="link-mute state-{{'on' if tag.notification_muted else 'off'}}" href="{{url_for('tags.mute', uuid=tag.uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications" class="icon icon-mute" ></a>
+</td>
+<td class="title-col inline">{{tag.title}}</td>
+<td>
+<a class="pure-button pure-button-primary" href="{{ url_for('tags.form_tag_edit', uuid=uuid) }}">Edit</a>
+<a class="pure-button pure-button-primary" href="{{ url_for('tags.delete', uuid=uuid) }}" title="Deletes and removes tag">Delete</a>
+<a class="pure-button pure-button-primary" href="{{ url_for('tags.unlink', uuid=uuid) }}" title="Keep the tag but unlink any watches">Unlink</a>
+</td>
+</tr>
+{% endfor %}
+</tbody>
+</table>
+</div>
+</div>
+{% endblock %}
@@ -10,6 +10,7 @@ import time

visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4, header, footer, section, article, aside, details, main, nav, section, summary'


class Non200ErrorCodeReceived(Exception):
    def __init__(self, status_code, url, screenshot=None, xpath_data=None, page_html=None):
        # Set this so we can use it in other parts of the app
@@ -24,10 +25,12 @@ class Non200ErrorCodeReceived(Exception):
            self.page_text = html_tools.html_to_text(page_html)
        return


class checksumFromPreviousCheckWasTheSame(Exception):
    def __init__(self):
        return


class JSActionExceptions(Exception):
    def __init__(self, status_code, url, screenshot, message=''):
        self.status_code = status_code
@@ -36,6 +39,7 @@ class JSActionExceptions(Exception):
        self.message = message
        return


class BrowserStepsStepTimout(Exception):
    def __init__(self, step_n):
        self.step_n = step_n
@@ -51,6 +55,7 @@ class PageUnloadable(Exception):
        self.message = message
        return


class EmptyReply(Exception):
    def __init__(self, status_code, url, screenshot=None):
        # Set this so we can use it in other parts of the app
@@ -59,6 +64,7 @@ class EmptyReply(Exception):
        self.screenshot = screenshot
        return


class ScreenshotUnavailable(Exception):
    def __init__(self, status_code, url, page_html=None):
        # Set this so we can use it in other parts of the app
@@ -69,21 +75,25 @@ class ScreenshotUnavailable(Exception):
            self.page_text = html_to_text(page_html)
        return


class ReplyWithContentButNoText(Exception):
-    def __init__(self, status_code, url, screenshot=None):
+    def __init__(self, status_code, url, screenshot=None, has_filters=False, html_content=''):
        # Set this so we can use it in other parts of the app
        self.status_code = status_code
        self.url = url
        self.screenshot = screenshot
+        self.has_filters = has_filters
+        self.html_content = html_content
        return


class Fetcher():
    browser_steps = None
    browser_steps_screenshot_path = None
    content = None
    error = None
    fetcher_description = "No description"
-    headers = None
+    headers = {}
    status_code = None
    webdriver_js_execute_code = None
    xpath_data = None
@@ -105,7 +115,6 @@ class Fetcher():
        self.xpath_element_js = resource_string(__name__, "res/xpath_element_scraper.js").decode('utf-8')
        self.instock_data_js = resource_string(__name__, "res/stock-not-in-stock.js").decode('utf-8')

    @abstractmethod
    def get_error(self):
        return self.error
@@ -140,6 +149,13 @@ class Fetcher():
    def is_ready(self):
        return True

+    def get_all_headers(self):
+        """
+        Get all headers but ensure all keys are lowercase
+        :return:
+        """
+        return {k.lower(): v for k, v in self.headers.items()}
+
    def iterate_browser_steps(self):
        from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface
        from playwright._impl._api_types import TimeoutError
@@ -152,13 +168,15 @@ class Fetcher():
        interface = steppable_browser_interface()
        interface.page = self.page

-        valid_steps = filter(lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'), self.browser_steps)
+        valid_steps = filter(
+            lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
+            self.browser_steps)

        for step in valid_steps:
            step_n += 1
            print(">> Iterating check - browser Step n {} - {}...".format(step_n, step['operation']))
-            self.screenshot_step("before-"+str(step_n))
-            self.save_step_html("before-"+str(step_n))
+            self.screenshot_step("before-" + str(step_n))
+            self.save_step_html("before-" + str(step_n))
            try:
                optional_value = step['optional_value']
                selector = step['selector']
@@ -173,12 +191,11 @@ class Fetcher():
                                             optional_value=optional_value)
                self.screenshot_step(step_n)
                self.save_step_html(step_n)
-            except TimeoutError:
+            except TimeoutError as e:
+                print(str(e))
                # Stop processing here
                raise BrowserStepsStepTimout(step_n=step_n)

    # It's always good to reset these
    def delete_browser_steps_screenshots(self):
        import glob
@@ -186,7 +203,9 @@ class Fetcher():
            dest = os.path.join(self.browser_steps_screenshot_path, 'step_*.jpeg')
            files = glob.glob(dest)
            for f in files:
-                os.unlink(f)
+                if os.path.isfile(f):
+                    os.unlink(f)


# Maybe for the future, each fetcher provides its own diff output, could be used for text, image
# the current one would return javascript output (as we use JS to generate the diff)
@@ -205,6 +224,7 @@ def available_fetchers():

    return p


class base_html_playwright(Fetcher):
    fetcher_description = "Playwright {}/Javascript".format(
        os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').capitalize()
@@ -268,6 +288,119 @@ class base_html_playwright(Fetcher):
        with open(destination, 'w') as f:
            f.write(content)

+    def run_fetch_browserless_puppeteer(self,
+            url,
+            timeout,
+            request_headers,
+            request_body,
+            request_method,
+            ignore_status_codes=False,
+            current_include_filters=None,
+            is_binary=False):
+
+        from pkg_resources import resource_string
+
+        extra_wait_ms = (int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay) * 1000
+
+        self.xpath_element_js = self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors)
+        code = resource_string(__name__, "res/puppeteer_fetch.js").decode('utf-8')
+        # In the future inject this is a proper JS package
+        code = code.replace('%xpath_scrape_code%', self.xpath_element_js)
+        code = code.replace('%instock_scrape_code%', self.instock_data_js)
+
+        from requests.exceptions import ConnectTimeout, ReadTimeout
+        wait_browserless_seconds = 240
+
+        browserless_function_url = os.getenv('BROWSERLESS_FUNCTION_URL')
+        from urllib.parse import urlparse
+        if not browserless_function_url:
+            # Convert/try to guess from PLAYWRIGHT_DRIVER_URL
+            o = urlparse(os.getenv('PLAYWRIGHT_DRIVER_URL'))
+            browserless_function_url = o._replace(scheme="http")._replace(path="function").geturl()
+
+        # Append proxy connect string
+        if self.proxy:
+            import urllib.parse
+            # Remove username/password if it exists in the URL or you will receive "ERR_NO_SUPPORTED_PROXIES" error
+            # Actual authentication handled by Puppeteer/node
+            o = urlparse(self.proxy.get('server'))
+            proxy_url = urllib.parse.quote(o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl())
+            browserless_function_url = f"{browserless_function_url}&--proxy-server={proxy_url}&dumpio=true"
+
+        try:
+            amp = '&' if '?' in browserless_function_url else '?'
+            response = requests.request(
+                method="POST",
+                json={
+                    "code": code,
+                    "context": {
+                        # Very primitive disk cache - USE WITH EXTREME CAUTION
+                        # Run browserless container with -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]"
+                        'disk_cache_dir': os.getenv("PUPPETEER_DISK_CACHE", False), # or path to disk cache ending in /, ie /tmp/cache/
+                        'execute_js': self.webdriver_js_execute_code,
+                        'extra_wait_ms': extra_wait_ms,
+                        'include_filters': current_include_filters,
+                        'req_headers': request_headers,
+                        'screenshot_quality': int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)),
+                        'url': url,
+                        'user_agent': {k.lower(): v for k, v in request_headers.items()}.get('user-agent', None),
+                        'proxy_username': self.proxy.get('username', '') if self.proxy else False,
+                        'proxy_password': self.proxy.get('password', '') if self.proxy else False,
+                        'no_cache_list': [
+                            'twitter',
+                            '.pdf'
+                        ],
+                        # Could use https://github.com/easylist/easylist here, or install a plugin
+                        'block_url_list': [
+                            'adnxs.com',
+                            'analytics.twitter.com',
+                            'doubleclick.net',
+                            'google-analytics.com',
+                            'googletagmanager',
+                            'trustpilot.com'
+                        ]
+                    }
+                },
+                # @todo /function needs adding ws:// to http:// rebuild this
+                url=browserless_function_url+f"{amp}--disable-features=AudioServiceOutOfProcess&dumpio=true&--disable-remote-fonts",
+                timeout=wait_browserless_seconds)
+
+        except ReadTimeout:
+            raise PageUnloadable(url=url, status_code=None, message=f"No response from browserless in {wait_browserless_seconds}s")
+        except ConnectTimeout:
+            raise PageUnloadable(url=url, status_code=None, message=f"Timed out connecting to browserless, retrying..")
+        else:
+            # 200 Here means that the communication to browserless worked only, not the page state
+            if response.status_code == 200:
+                import base64
+
+                x = response.json()
+                if not x.get('screenshot'):
+                    # https://github.com/puppeteer/puppeteer/blob/v1.0.0/docs/troubleshooting.md#tips
+                    # https://github.com/puppeteer/puppeteer/issues/1834
+                    # https://github.com/puppeteer/puppeteer/issues/1834#issuecomment-381047051
+                    # Check your memory is shared and big enough
+                    raise ScreenshotUnavailable(url=url, status_code=None)
+
+                if not x.get('content', '').strip():
+                    raise EmptyReply(url=url, status_code=None)
+
+                if x.get('status_code', 200) != 200 and not ignore_status_codes:
+                    raise Non200ErrorCodeReceived(url=url, status_code=x.get('status_code', 200), page_html=x['content'])
+
+                self.content = x.get('content')
+                self.headers = x.get('headers')
+                self.instock_data = x.get('instock_data')
+                self.screenshot = base64.b64decode(x.get('screenshot'))
+                self.status_code = x.get('status_code')
+                self.xpath_data = x.get('xpath_data')
+
+            else:
+                # Some other error from browserless
+                raise PageUnloadable(url=url, status_code=None, message=response.content.decode('utf-8'))
+
    def run(self,
            url,
            timeout,
@@ -278,6 +411,24 @@ class base_html_playwright(Fetcher):
            current_include_filters=None,
            is_binary=False):

+        # For now, USE_EXPERIMENTAL_PUPPETEER_FETCH is not supported by watches with BrowserSteps (for now!)
+        has_browser_steps = self.browser_steps and list(filter(
+            lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
+            self.browser_steps))
+
+        if not has_browser_steps:
+            if os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH'):
+                # Temporary backup solution until we rewrite the playwright code
+                return self.run_fetch_browserless_puppeteer(
+                    url,
+                    timeout,
+                    request_headers,
+                    request_body,
+                    request_method,
+                    ignore_status_codes,
+                    current_include_filters,
+                    is_binary)
+
        from playwright.sync_api import sync_playwright
        import playwright._impl._api_types

@@ -294,7 +445,7 @@ class base_html_playwright(Fetcher):
            # Set user agent to prevent Cloudflare from blocking the browser
            # Use the default one configured in the App.py model that's passed from fetch_site_status.py
            context = browser.new_context(
-                user_agent=request_headers['User-Agent'] if request_headers.get('User-Agent') else 'Mozilla/5.0',
+                user_agent={k.lower(): v for k, v in request_headers.items()}.get('user-agent', None),
                proxy=self.proxy,
                # This is needed to enable JavaScript execution on GitHub and others
                bypass_csp=True,
@@ -324,12 +475,12 @@ class base_html_playwright(Fetcher):
            except playwright._impl._api_types.Error as e:
                # Retry once - https://github.com/browserless/chrome/issues/2485
                # Sometimes errors related to invalid cert's and other can be random
-                print ("Content Fetcher > retrying request got error - ", str(e))
+                print("Content Fetcher > retrying request got error - ", str(e))
                time.sleep(1)
                response = self.page.goto(url, wait_until='commit')

            except Exception as e:
-                print ("Content Fetcher > Other exception when page.goto", str(e))
+                print("Content Fetcher > Other exception when page.goto", str(e))
                context.close()
                browser.close()
                raise PageUnloadable(url=url, status_code=None, message=str(e))
@@ -348,7 +499,7 @@ class base_html_playwright(Fetcher):
                    # This can be ok, we will try to grab what we could retrieve
                    pass
                except Exception as e:
-                    print ("Content Fetcher > Other exception when executing custom JS code", str(e))
+                    print("Content Fetcher > Other exception when executing custom JS code", str(e))
                    context.close()
                    browser.close()
                    raise PageUnloadable(url=url, status_code=None, message=str(e))
@@ -356,7 +507,7 @@ class base_html_playwright(Fetcher):
            if response is None:
                context.close()
                browser.close()
-                print ("Content Fetcher > Response object was none")
+                print("Content Fetcher > Response object was none")
                raise EmptyReply(url=url, status_code=None)

            # Run Browser Steps here
@@ -370,7 +521,7 @@ class base_html_playwright(Fetcher):
            if len(self.page.content().strip()) == 0:
                context.close()
                browser.close()
-                print ("Content Fetcher > Content was empty")
+                print("Content Fetcher > Content was empty")
                raise EmptyReply(url=url, status_code=response.status)

            self.status_code = response.status
@@ -382,7 +533,8 @@ class base_html_playwright(Fetcher):
            else:
                self.page.evaluate("var include_filters=''")

-            self.xpath_data = self.page.evaluate("async () => {" + self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) + "}")
+            self.xpath_data = self.page.evaluate(
+                "async () => {" + self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) + "}")
            self.instock_data = self.page.evaluate("async () => {" + self.instock_data_js + "}")

            # Bug 3 in Playwright screenshot handling
@@ -394,7 +546,8 @@ class base_html_playwright(Fetcher):
            # acceptable screenshot quality here
            try:
                # The actual screenshot
-                self.screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)))
+                self.screenshot = self.page.screenshot(type='jpeg', full_page=True,
+                                                       quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)))
            except Exception as e:
                context.close()
                browser.close()
@@ -403,6 +556,7 @@ class base_html_playwright(Fetcher):
            context.close()
            browser.close()


class base_html_webdriver(Fetcher):
    if os.getenv("WEBDRIVER_URL"):
        fetcher_description = "WebDriver Chrome/Javascript via '{}'".format(os.getenv("WEBDRIVER_URL"))
@@ -532,7 +686,7 @@ class html_requests(Fetcher):
            is_binary=False):

        # Make requests use a more modern looking user-agent
-        if not 'User-Agent' in request_headers:
+        if not {k.lower(): v for k, v in request_headers.items()}.get('user-agent', None):
            request_headers['User-Agent'] = os.getenv("DEFAULT_SETTINGS_HEADERS_USERAGENT",
                                                      'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36')
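Several of these hunks rely on the same pattern: header names arrive with arbitrary casing, so keys are lowered before lookup (the new `get_all_headers()` and the `user-agent` lookups). A minimal standalone sketch of that pattern, with invented header values:

```python
# Minimal sketch of the lowercase-header lookup used in the fetcher changes above.
# The header dict below is an invented example.
request_headers = {'User-Agent': 'Mozilla/5.0', 'Accept-Language': 'en-GB'}

lowercased = {k.lower(): v for k, v in request_headers.items()}
print(lowercased.get('user-agent'))       # 'Mozilla/5.0'
print(lowercased.get('cookie', 'unset'))  # 'unset' - missing keys fall back cleanly
```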
@@ -35,15 +35,19 @@ def customSequenceMatcher(before, after, include_equal=False, include_removed=Tr

# only_differences - only return info about the differences, no context
# line_feed_sep could be "<br>" or "<li>" or "\n" etc
-def render_diff(previous_version_file_contents, newest_version_file_contents, include_equal=False, include_removed=True, include_added=True, include_replaced=True, line_feed_sep="\n", include_change_type_prefix=True):
+def render_diff(previous_version_file_contents, newest_version_file_contents, include_equal=False, include_removed=True, include_added=True, include_replaced=True, line_feed_sep="\n", include_change_type_prefix=True, patch_format=False):

    newest_version_file_contents = [line.rstrip() for line in newest_version_file_contents.splitlines()]

    if previous_version_file_contents:
        previous_version_file_contents = [line.rstrip() for line in previous_version_file_contents.splitlines()]
    else:
        previous_version_file_contents = ""

+    if patch_format:
+        patch = difflib.unified_diff(previous_version_file_contents, newest_version_file_contents)
+        return line_feed_sep.join(patch)
+
    rendered_diff = customSequenceMatcher(before=previous_version_file_contents,
                                          after=newest_version_file_contents,
                                          include_equal=include_equal,
@@ -54,4 +58,5 @@ def render_diff(previous_version_file_contents, newest_version_file_contents, in

    # Recursively join lists
    f = lambda L: line_feed_sep.join([f(x) if type(x) is list else x for x in L])
-    return f(rendered_diff)
+    p= f(rendered_diff)
+    return p
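The new `patch_format` branch hands the two line lists straight to `difflib.unified_diff` and joins the result with the chosen separator. A standalone sketch, using invented sample versions:

```python
import difflib

# Standalone sketch of the patch_format branch above: join unified_diff output
# with the chosen line separator. The before/after texts are invented.
before = "price: 10\nin stock".splitlines()
after = "price: 12\nin stock".splitlines()

patch = difflib.unified_diff(before, after, lineterm='')
print("\n".join(patch))
# ---/+++ headers followed by "-price: 10" and "+price: 12"
```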
@@ -1,5 +1,6 @@
import os
import re
+from distutils.util import strtobool

from wtforms import (
    BooleanField,
@@ -28,6 +29,8 @@ from changedetectionio.notification import (

from wtforms.fields import FormField

+dictfilt = lambda x, y: dict([ (i,x[i]) for i in x if i in set(y) ])
+
valid_method = {
    'GET',
    'POST',
@@ -90,6 +93,29 @@ class SaltyPasswordField(StringField):
        else:
            self.data = False

+class StringTagUUID(StringField):
+
+    # process_formdata(self, valuelist) handled manually in POST handler
+
+    # Is what is shown when field <input> is rendered
+    def _value(self):
+        # Tag UUID to name, on submit it will convert it back (in the submit handler of init.py)
+        if self.data and type(self.data) is list:
+            tag_titles = []
+            for i in self.data:
+                tag = self.datastore.data['settings']['application']['tags'].get(i)
+                if tag:
+                    tag_title = tag.get('title')
+                    if tag_title:
+                        tag_titles.append(tag_title)
+
+            return ', '.join(tag_titles)
+
+        if not self.data:
+            return ''
+
+        return 'error'
+
class TimeBetweenCheckForm(Form):
    weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
    days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
@@ -203,16 +229,19 @@ class ValidateJinja2Template(object):
    def __call__(self, form, field):
        from changedetectionio import notification

-        from jinja2 import Environment, BaseLoader, TemplateSyntaxError
+        from jinja2 import Environment, BaseLoader, TemplateSyntaxError, UndefinedError
        from jinja2.meta import find_undeclared_variables

        try:
            jinja2_env = Environment(loader=BaseLoader)
            jinja2_env.globals.update(notification.valid_tokens)

            rendered = jinja2_env.from_string(field.data).render()
        except TemplateSyntaxError as e:
            raise ValidationError(f"This is not a valid Jinja2 template: {e}") from e
+        except UndefinedError as e:
+            raise ValidationError(f"A variable or function is not defined: {e}") from e

        ast = jinja2_env.parse(field.data)
        undefined = ", ".join(find_undeclared_variables(ast))
@@ -232,9 +261,10 @@ class validateURL(object):

    def __call__(self, form, field):
        import validators
+        # If hosts that only contain alphanumerics are allowed ("localhost" for example)
+        allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
        try:
-            validators.url(field.data.strip())
+            validators.url(field.data.strip(), simple_host=allow_simplehost)
        except validators.ValidationFailure:
            message = field.gettext('\'%s\' is not a valid URL.' % (field.data.strip()))
            raise ValidationError(message)
@@ -347,7 +377,7 @@ class quickWatchForm(Form):
    from . import processors

    url = fields.URLField('URL', validators=[validateURL()])
-    tag = StringField('Group tag', [validators.Optional()])
+    tags = StringTagUUID('Group tag', [validators.Optional()])
    watch_submit_button = SubmitField('Watch', render_kw={"class": "pure-button pure-button-primary"})
    processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff")
    edit_and_watch_submit_button = SubmitField('Edit > Watch', render_kw={"class": "pure-button pure-button-primary"})
@@ -355,6 +385,7 @@ class quickWatchForm(Form):

# Common to a single watch and the global settings
class commonSettingsForm(Form):

    notification_urls = StringListField('Notification URL List', validators=[validators.Optional(), ValidateAppRiseServers()])
    notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()])
    notification_body = TextAreaField('Notification Body', default='{{ watch_url }} had a change.', validators=[validators.Optional(), ValidateJinja2Template()])
@@ -382,7 +413,7 @@ class SingleBrowserStep(Form):
class watchForm(commonSettingsForm):

    url = fields.URLField('URL', validators=[validateURL()])
-    tag = StringField('Group tag', [validators.Optional()], default='')
+    tags = StringTagUUID('Group tag', [validators.Optional()], default='')

    time_between_check = FormField(TimeBetweenCheckForm)

@@ -474,13 +505,20 @@ class globalSettingsRequestForm(Form):
class globalSettingsApplicationForm(commonSettingsForm):

    api_access_token_enabled = BooleanField('API access token security check enabled', default=True, validators=[validators.Optional()])
-    base_url = StringField('Base URL', validators=[validators.Optional()])
+    base_url = StringField('Notification base URL override',
+                           validators=[validators.Optional()],
+                           render_kw={"placeholder": os.getenv('BASE_URL', 'Not set')}
+                           )
    empty_pages_are_a_change = BooleanField('Treat empty pages as a change?', default=False)
    fetch_backend = RadioField('Fetch Method', default="html_requests", choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
    global_ignore_text = StringListField('Ignore Text', [ValidateListRegex()])
    global_subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)])
    ignore_whitespace = BooleanField('Ignore whitespace')
    password = SaltyPasswordField()
+    pager_size = IntegerField('Pager size',
+                              render_kw={"style": "width: 5em;"},
+                              validators=[validators.NumberRange(min=0,
+                                                                 message="Should be atleast zero (disabled)")])
    removepassword_button = SubmitField('Remove password', render_kw={"class": "pure-button pure-button-primary"})
    render_anchor_tag_content = BooleanField('Render anchor tag content', default=False)
    shared_diff_access = BooleanField('Allow access to view diff page when password is enabled', default=False, validators=[validators.Optional()])
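The Jinja2 notification-template validation above (syntax check, then a scan for undeclared variables) can be reproduced outside the form. A small sketch, with an invented template string and an invented stand-in for the token list:

```python
# Small sketch of the validation flow above: compile a user-supplied template,
# then list the names it expects from outside. The template text is invented and
# the globals dict is a stand-in for notification.valid_tokens.
from jinja2 import Environment, BaseLoader, TemplateSyntaxError
from jinja2.meta import find_undeclared_variables

jinja2_env = Environment(loader=BaseLoader)
jinja2_env.globals.update({'watch_url': ''})

source = "Change at {{ watch_url }} spotted by {{ watcher_name }}"
try:
    jinja2_env.from_string(source).render()
except TemplateSyntaxError as e:
    print(f"This is not a valid Jinja2 template: {e}")

ast = jinja2_env.parse(source)
# Names the template references but does not define itself, e.g. {'watch_url', 'watcher_name'}
print(sorted(find_undeclared_variables(ast)))
```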
@@ -7,17 +7,35 @@ from typing import List
import json
import re

# HTML added to be sure each result matching a filter (.example) gets converted to a new line by Inscriptis
TEXT_FILTER_LIST_LINE_SUFFIX = "<br>"

+PERL_STYLE_REGEX = r'^/(.*?)/([a-z]*)?$'
# 'price' , 'lowPrice', 'highPrice' are usually under here
-# all of those may or may not appear on different websites
-LD_JSON_PRODUCT_OFFER_SELECTOR = "json:$..offers"
+# All of those may or may not appear on different websites - I didnt find a way todo case-insensitive searching here
+LD_JSON_PRODUCT_OFFER_SELECTORS = ["json:$..offers", "json:$..Offers"]

class JSONNotFound(ValueError):
    def __init__(self, msg):
        ValueError.__init__(self, msg)


+# Doesn't look like python supports forward slash auto enclosure in re.findall
+# So convert it to inline flag "(?i)foobar" type configuration
+def perl_style_slash_enclosed_regex_to_options(regex):
+
+    res = re.search(PERL_STYLE_REGEX, regex, re.IGNORECASE)
+
+    if res:
+        flags = res.group(2) if res.group(2) else 'i'
+        regex = f"(?{flags}){res.group(1)}"
+    else:
+        # Fall back to just ignorecase as an option
+        regex = f"(?i){regex}"
+
+    return regex
+
# Given a CSS Rule, and a blob of HTML, return the blob of HTML that matches
def include_filters(include_filters, html_content, append_pretty_line_formatting=False):
    soup = BeautifulSoup(html_content, "html.parser")
@@ -137,45 +155,58 @@ def _get_stripped_text_from_json_match(match):
def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None):
    stripped_text_from_html = False

-    # Try to parse/filter out the JSON, if we get some parser error, then maybe it's embedded <script type=ldjson>
+    # Try to parse/filter out the JSON, if we get some parser error, then maybe it's embedded within HTML tags
    try:
        stripped_text_from_html = _parse_json(json.loads(content), json_filter)
    except json.JSONDecodeError:

        # Foreach <script json></script> blob.. just return the first that matches json_filter
-        s = []
+        # As a last resort, try to parse the whole <body>
        soup = BeautifulSoup(content, 'html.parser')

        if ensure_is_ldjson_info_type:
            bs_result = soup.findAll('script', {"type": "application/ld+json"})
        else:
            bs_result = soup.findAll('script')
+            bs_result += soup.findAll('body')

+        bs_jsons = []
-        if not bs_result:
-            raise JSONNotFound("No parsable JSON found in this document")

        for result in bs_result:
            # Skip empty tags, and things that dont even look like JSON
-            if not result.string or not '{' in result.string:
+            if not result.text or '{' not in result.text:
                continue

            try:
-                json_data = json.loads(result.string)
+                json_data = json.loads(result.text)
+                bs_jsons.append(json_data)
            except json.JSONDecodeError:
-                # Just skip it
+                # Skip objects which cannot be parsed
                continue
-            else:
-                stripped_text_from_html = _parse_json(json_data, json_filter)
-                if ensure_is_ldjson_info_type:
-                    # Could sometimes be list, string or something else random
-                    if isinstance(json_data, dict):
-                        # If it has LD JSON 'key' @type, and @type is 'product', and something was found for the search
-                        # (Some sites have multiple of the same ld+json @type='product', but some have the review part, some have the 'price' part)
-                        if json_data.get('@type', False) and json_data.get('@type','').lower() == ensure_is_ldjson_info_type.lower() and stripped_text_from_html:
-                            break
-                elif stripped_text_from_html:
-                    break
+
+        if not bs_jsons:
+            raise JSONNotFound("No parsable JSON found in this document")
+
+        for json_data in bs_jsons:
+            stripped_text_from_html = _parse_json(json_data, json_filter)
+
+            if ensure_is_ldjson_info_type:
+                # Could sometimes be list, string or something else random
+                if isinstance(json_data, dict):
+                    # If it has LD JSON 'key' @type, and @type is 'product', and something was found for the search
+                    # (Some sites have multiple of the same ld+json @type='product', but some have the review part, some have the 'price' part)
+                    # @type could also be a list (Product, SubType)
+                    # LD_JSON auto-extract also requires some content PLUS the ldjson to be present
+                    # 1833 - could be either str or dict, should not be anything else
+                    if json_data.get('@type') and stripped_text_from_html:
+                        try:
+                            if json_data.get('@type') == str or json_data.get('@type') == dict:
+                                types = [json_data.get('@type')] if isinstance(json_data.get('@type'), str) else json_data.get('@type')
+                                if ensure_is_ldjson_info_type.lower() in [x.lower().strip() for x in types]:
+                                    break
+                        except:
+                            continue
+
+            elif stripped_text_from_html:
+                break

    if not stripped_text_from_html:
        # Re 265 - Just return an empty string when filter not found
@@ -188,42 +219,41 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None
#
# wordlist - list of regex's (str) or words (str)
def strip_ignore_text(content, wordlist, mode="content"):
-    ignore = []
-    ignore_regex = []
-
-    # @todo check this runs case insensitive
-    for k in wordlist:
-
-        # Is it a regex?
-        if k[0] == '/':
-            ignore_regex.append(k.strip(" /"))
-        else:
-            ignore.append(k)
-
    i = 0
    output = []
+    ignore_text = []
+    ignore_regex = []
    ignored_line_numbers = []

+    for k in wordlist:
+        # Is it a regex?
+        res = re.search(PERL_STYLE_REGEX, k, re.IGNORECASE)
+        if res:
+            ignore_regex.append(re.compile(perl_style_slash_enclosed_regex_to_options(k)))
+        else:
+            ignore_text.append(k.strip())

    for line in content.splitlines():
        i += 1
        # Always ignore blank lines in this mode. (when this function gets called)
+        got_match = False
        if len(line.strip()):
-            regex_matches = False
+            for l in ignore_text:
+                if l.lower() in line.lower():
+                    got_match = True

-            # if any of these match, skip
-            for regex in ignore_regex:
-                try:
-                    if re.search(regex, line, re.IGNORECASE):
-                        regex_matches = True
-                except Exception as e:
-                    continue
+            if not got_match:
+                for r in ignore_regex:
+                    if r.search(line):
+                        got_match = True

-            if not regex_matches and not any(skip_text.lower() in line.lower() for skip_text in ignore):
+            if not got_match:
+                # Not ignored
                output.append(line.encode('utf8'))
            else:
                ignored_line_numbers.append(i)


    # Used for finding out what to highlight
    if mode == "line numbers":
        return ignored_line_numbers
@@ -264,9 +294,18 @@ def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:

# Does LD+JSON exist with a @type=='product' and a .price set anywhere?
def has_ldjson_product_info(content):
+    pricing_data = ''
+
    try:
-        pricing_data = extract_json_as_string(content=content, json_filter=LD_JSON_PRODUCT_OFFER_SELECTOR, ensure_is_ldjson_info_type="product")
-    except JSONNotFound as e:
+        if not 'application/ld+json' in content:
+            return False
+
+        for filter in LD_JSON_PRODUCT_OFFER_SELECTORS:
+            pricing_data += extract_json_as_string(content=content,
+                                                   json_filter=filter,
+                                                   ensure_is_ldjson_info_type="product")
+
+    except Exception as e:
        # Totally fine
        return False
    x=bool(pricing_data)
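The `/pattern/flags` convention introduced above (used by `strip_ignore_text` for the ignore rules) converts the Perl-style form into Python inline flags. An illustrative re-implementation that mirrors the shown logic, with invented sample rules:

```python
import re

# Illustrative re-implementation of the /pattern/flags conversion added above;
# it mirrors the shown logic rather than importing it from the project.
PERL_STYLE_REGEX = r'^/(.*?)/([a-z]*)?$'

def perl_style_to_inline(regex):
    res = re.search(PERL_STYLE_REGEX, regex, re.IGNORECASE)
    if res:
        flags = res.group(2) if res.group(2) else 'i'
        return f"(?{flags}){res.group(1)}"
    # Plain words fall back to case-insensitive matching
    return f"(?i){regex}"

print(perl_style_to_inline('/sold\\s+out/i'))  # (?i)sold\s+out
print(perl_style_to_inline('Out of stock'))    # (?i)Out of stock
print(bool(re.compile(perl_style_to_inline('/sold\\s+out/i')).search('SOLD  OUT')))  # True
```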
@@ -52,7 +52,8 @@ class import_url_list(Importer):

            # Flask wtform validators wont work with basic auth, use validators package
            # Up to 5000 per batch so we dont flood the server
-            if len(url) and validators.url(url.replace('source:', '')) and good < 5000:
+            # @todo validators.url failed on local hostnames (such as referring to ourself when using browserless)
+            if len(url) and 'http' in url.lower() and good < 5000:
                extras = None
                if processor:
                    extras = {'processor': processor}
@@ -84,7 +85,8 @@ class import_distill_io_json(Importer):
        now = time.time()
        self.new_uuids=[]

+        # @todo Use JSONSchema like in the API to validate here.

        try:
            data = json.loads(data.strip())
        except json.decoder.JSONDecodeError:
@@ -119,11 +121,8 @@ class import_distill_io_json(Importer):
            except IndexError:
                pass

-            if d.get('tags', False):
-                extras['tag'] = ", ".join(d['tags'])

            new_uuid = datastore.add_watch(url=d['uri'].strip(),
+                                           tag=",".join(d.get('tags', [])),
                                           extras=extras,
                                           write_to_disk_now=False)
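The relaxed URL-list check above keeps non-empty lines that merely contain `http`, so local hostnames (for example browserless targets) are no longer rejected, with the 5000-per-batch cap retained. A small sketch of that filter, with an invented pasted batch:

```python
# Sketch of the relaxed import filter above: keep non-empty lines that look like
# URLs and cap the batch size. The pasted text below is an invented example.
pasted = """
https://example.com/pricing
http://localhost:3000/status
not-a-url
"""

good = 0
accepted = []
for url in pasted.splitlines():
    url = url.strip()
    if len(url) and 'http' in url.lower() and good < 5000:
        accepted.append(url)
        good += 1

print(accepted)  # ['https://example.com/pricing', 'http://localhost:3000/status']
```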
@@ -23,25 +23,27 @@ class model(dict):
            'workers': int(getenv("DEFAULT_SETTINGS_REQUESTS_WORKERS", "10")), # Number of threads, lower is better for slow connections
        },
        'application': {
+            # Custom notification content
            'api_access_token_enabled': True,
-            'password': False,
            'base_url' : None,
-            'extract_title_as_title': False,
            'empty_pages_are_a_change': False,
+            'extract_title_as_title': False,
            'fetch_backend': getenv("DEFAULT_FETCH_BACKEND", "html_requests"),
            'filter_failure_notification_threshold_attempts': _FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT,
            'global_ignore_text': [], # List of text to ignore when calculating the comparison checksum
            'global_subtractive_selectors': [],
            'ignore_whitespace': True,
-            'render_anchor_tag_content': False,
-            'notification_urls': [], # Apprise URL list
-            # Custom notification content
-            'notification_title': default_notification_title,
            'notification_body': default_notification_body,
            'notification_format': default_notification_format,
+            'notification_title': default_notification_title,
+            'notification_urls': [], # Apprise URL list
+            'pager_size': 50,
+            'password': False,
+            'render_anchor_tag_content': False,
            'schema_version' : 0,
            'shared_diff_access': False,
-            'webdriver_delay': None # Extra delay in seconds before extracting text
+            'webdriver_delay': None , # Extra delay in seconds before extracting text
+            'tags': {} #@todo use Tag.model initialisers
        }
    }
}
@@ -49,3 +51,15 @@ class model(dict):
    def __init__(self, *arg, **kw):
        super(model, self).__init__(*arg, **kw)
        self.update(self.base_config)


+def parse_headers_from_text_file(filepath):
+    headers = {}
+    with open(filepath, 'r') as f:
+        for l in f.readlines():
+            l = l.strip()
+            if not l.startswith('#') and ':' in l:
+                (k, v) = l.split(':')
+                headers[k.strip()] = v.strip()
+
+    return headers
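The new `parse_headers_from_text_file()` helper reads simple `Name: value` lines and skips `#` comment lines. A usage sketch with an invented headers file (the file name and header values are made up):

```python
# Usage sketch for the parse_headers_from_text_file() helper added above.
# It writes an invented headers.txt, then applies the same parsing shown in the diff.
import os
import tempfile

sample = "# extra headers for this watch\nCookie: session=abc123\nX-Requested-With: XMLHttpRequest\n"
path = os.path.join(tempfile.mkdtemp(), 'headers.txt')
with open(path, 'w') as f:
    f.write(sample)

headers = {}
with open(path, 'r') as f:
    for l in f.readlines():
        l = l.strip()
        if not l.startswith('#') and ':' in l:
            (k, v) = l.split(':')
            headers[k.strip()] = v.strip()

print(headers)  # {'Cookie': 'session=abc123', 'X-Requested-With': 'XMLHttpRequest'}
```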
changedetectionio/model/Tag.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+from .Watch import base_config
+import uuid
+
+class model(dict):
+
+    def __init__(self, *arg, **kw):
+
+        self.update(base_config)
+
+        self['uuid'] = str(uuid.uuid4())
+
+        if kw.get('default'):
+            self.update(kw['default'])
+            del kw['default']
+
+        # Goes at the end so we update the default object with the initialiser
+        super(model, self).__init__(*arg, **kw)
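Because `Tag.model` reuses the watch `base_config` and then applies the `default` keyword, creating a tag is plain dict construction. A sketch against the code above; the import path matches the new file, but the field values are invented and this assumes the `changedetectionio` package is importable:

```python
# Construction sketch for the Tag model above: base_config fields first,
# a fresh uuid, then any 'default' overrides. The values here are invented.
from changedetectionio.model import Tag

tag = Tag.model(default={'title': 'shoes', 'notification_muted': True})
print(tag['uuid'])                    # a random UUID4 string
print(tag['title'])                   # 'shoes'
print(tag.get('notification_muted'))  # True, overriding the base_config default
```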
@@ -20,6 +20,7 @@ base_config = {
    'body': None,
    'check_unique_lines': False, # On change-detected, compare against all history if its something new
    'check_count': 0,
+    'date_created': None,
    'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
    'extract_text': [], # Extract text by regex after filters
    'extract_title_as_title': False,
@@ -51,7 +52,8 @@ base_config = {
    'previous_md5_before_filters': False, # Used for skipping changedetection entirely
    'proxy': None, # Preferred proxy connection
    'subtractive_selectors': [],
-    'tag': None,
+    'tag': '', # Old system of text name for a tag, to be removed
+    'tags': [], # list of UUIDs to App.Tags
    'text_should_not_be_present': [], # Text that should not present
    # Re #110, so then if this is set to None, we know to use the default value instead
    # Requires setting to None on submit if it's the same as the default
@@ -454,10 +456,6 @@ class model(dict):

        return csv_output_filename

-    @property
-    # Return list of tags, stripped and lowercase, used for searching
-    def all_tags(self):
-        return [s.strip().lower() for s in self.get('tag','').split(',')]

    def has_special_diff_filter_options_set(self):
|
|||||||
'diff': '',
|
'diff': '',
|
||||||
'diff_added': '',
|
'diff_added': '',
|
||||||
'diff_full': '',
|
'diff_full': '',
|
||||||
|
'diff_patch': '',
|
||||||
'diff_removed': '',
|
'diff_removed': '',
|
||||||
'diff_url': '',
|
'diff_url': '',
|
||||||
'preview_url': '',
|
'preview_url': '',
|
||||||
@@ -89,10 +90,16 @@ def process_notification(n_object, datastore):
     n_body = jinja2_env.from_string(n_object.get('notification_body', default_notification_body)).render(**notification_parameters)
     n_title = jinja2_env.from_string(n_object.get('notification_title', default_notification_title)).render(**notification_parameters)
     n_format = valid_notification_formats.get(
-        n_object['notification_format'],
+        n_object.get('notification_format', default_notification_format),
         valid_notification_formats[default_notification_format],
     )

+    # If we arrived with 'System default' then look it up
+    if n_format == default_notification_format_for_watch and datastore.data['settings']['application'].get('notification_format') != default_notification_format_for_watch:
+        # Initially text or whatever
+        n_format = datastore.data['settings']['application'].get('notification_format', valid_notification_formats[default_notification_format])
+
+
     # https://github.com/caronc/apprise/wiki/Development_LogCapture
     # Anything higher than or equal to WARNING (which covers things like Connection errors)
     # raise it as an exception
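The hunk above makes a per-watch format of 'System default' defer to the application-wide notification format. A minimal sketch of that resolution order, using hypothetical dicts (`watch_settings`, `app_settings`) in place of the real datastore objects:

# Sketch only: resolve the effective notification format.
SYSTEM_DEFAULT = 'System default'

def resolve_notification_format(watch_settings, app_settings, fallback='Text'):
    n_format = watch_settings.get('notification_format', SYSTEM_DEFAULT)
    if n_format == SYSTEM_DEFAULT:
        # The per-watch placeholder defers to the global application preference
        n_format = app_settings.get('notification_format', fallback)
    return n_format

print(resolve_notification_format({'notification_format': 'System default'},
                                  {'notification_format': 'HTML'}))  # -> HTML
print(resolve_notification_format({}, {}))                           # -> Text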
@@ -145,9 +152,12 @@ def process_notification(n_object, datastore):
        # Apprise will default to HTML, so we need to override it
        # So that whats' generated in n_body is in line with what is going to be sent.
        # https://github.com/caronc/apprise/issues/633#issuecomment-1191449321
-        if not 'format=' in url and (n_format == 'text' or n_format == 'markdown'):
+        if not 'format=' in url and (n_format == 'Text' or n_format == 'Markdown'):
            prefix = '?' if not '?' in url else '&'
+            # Apprise format is lowercase text https://github.com/caronc/apprise/issues/633
+            n_format = n_format.lower()
            url = "{}{}format={}".format(url, prefix, n_format)
+            # If n_format == HTML, then apprise email should default to text/html and we should be sending HTML only

        apobj.add(url)

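For the `format=` override above, the URL only gains a query parameter when one is not already present, and whether `?` or `&` is used depends on the existing URL. A small, generic sketch of that string handling (not the project's actual helper):

def add_format_param(url, n_format):
    # Leave the URL alone if the user already forced a format
    if 'format=' in url:
        return url
    if n_format.lower() not in ('text', 'markdown'):
        return url
    prefix = '?' if '?' not in url else '&'
    # Apprise expects the lowercase form, e.g. format=markdown
    return "{}{}format={}".format(url, prefix, n_format.lower())

print(add_format_param('mailto://user:pass@example.com', 'Markdown'))
# mailto://user:pass@example.com?format=markdown
print(add_format_param('json://host/path?verify=no', 'Text'))
# json://host/path?verify=no&format=text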
@@ -168,7 +178,7 @@ def process_notification(n_object, datastore):
        log_value = logs.getvalue()
        if log_value and 'WARNING' in log_value or 'ERROR' in log_value:
            raise Exception(log_value)

        sent_objs.append({'title': n_title,
                          'body': n_body,
                          'url' : url,
@@ -186,22 +196,23 @@ def create_notification_parameters(n_object, datastore):
     uuid = n_object['uuid'] if 'uuid' in n_object else ''

     if uuid != '':
-        watch_title = datastore.data['watching'][uuid]['title']
-        watch_tag = datastore.data['watching'][uuid]['tag']
+        watch_title = datastore.data['watching'][uuid].get('title', '')
+        tag_list = []
+        tags = datastore.get_all_tags_for_watch(uuid)
+        if tags:
+            for tag_uuid, tag in tags.items():
+                tag_list.append(tag.get('title'))
+        watch_tag = ', '.join(tag_list)
     else:
         watch_title = 'Change Detection'
         watch_tag = ''

     # Create URLs to customise the notification with
-    base_url = datastore.data['settings']['application']['base_url']
+    # active_base_url - set in store.py data property
+    base_url = datastore.data['settings']['application'].get('active_base_url')

     watch_url = n_object['watch_url']

-    # Re #148 - Some people have just {{ base_url }} in the body or title, but this may break some notification services
-    # like 'Join', so it's always best to atleast set something obvious so that they are not broken.
-    if base_url == '':
-        base_url = "<base-url-env-var-not-set>"
-
     diff_url = "{}/diff/{}".format(base_url, uuid)
     preview_url = "{}/preview/{}".format(base_url, uuid)

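With the move from a single `tag` string to a list of tag UUIDs, the `{{watch_tag}}` notification token becomes a comma-separated list of tag titles. A sketch with an invented `tags` mapping standing in for `datastore.get_all_tags_for_watch()`:

# Hypothetical data: tag uuid -> tag object, as a plain dict
tags = {
    'uuid-1': {'title': 'shopping'},
    'uuid-2': {'title': 'electronics'},
}

tag_list = [tag.get('title') for tag in tags.values() if tag.get('title')]
watch_tag = ', '.join(tag_list)
print(watch_tag)  # shopping, electronics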
@@ -211,11 +222,12 @@ def create_notification_parameters(n_object, datastore):
     # Valid_tokens also used as a field validator
     tokens.update(
         {
-            'base_url': base_url if base_url is not None else '',
+            'base_url': base_url,
             'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else '',
             'diff': n_object.get('diff', ''), # Null default in the case we use a test
             'diff_added': n_object.get('diff_added', ''), # Null default in the case we use a test
             'diff_full': n_object.get('diff_full', ''), # Null default in the case we use a test
+            'diff_patch': n_object.get('diff_patch', ''), # Null default in the case we use a test
             'diff_removed': n_object.get('diff_removed', ''), # Null default in the case we use a test
             'diff_url': diff_url,
             'preview_url': preview_url,
@@ -9,7 +9,7 @@ class difference_detection_processor():
        super().__init__(*args, **kwargs)

    @abstractmethod
-    def run(self, uuid, skip_when_checksum_same=True):
+    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
        update_obj = {'last_notification_error': False, 'last_error': False}
        some_data = 'xxxxx'
        update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest()
@@ -12,6 +12,12 @@ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 name = 'Re-stock detection for single product pages'
 description = 'Detects if the product goes back to in-stock'

+class UnableToExtractRestockData(Exception):
+    def __init__(self, status_code):
+        # Set this so we can use it in other parts of the app
+        self.status_code = status_code
+        return
+
 class perform_site_check(difference_detection_processor):
     screenshot = None
     xpath_data = None
@@ -36,11 +42,10 @@ class perform_site_check(difference_detection_processor):

        # Unset any existing notification error
        update_obj = {'last_notification_error': False, 'last_error': False}
-        extra_headers = watch.get('headers', [])

-        # Tweak the base config with the per-watch ones
-        request_headers = deepcopy(self.datastore.data['settings']['headers'])
-        request_headers.update(extra_headers)
+        request_headers = watch.get('headers', [])
+        request_headers.update(self.datastore.get_all_base_headers())
+        request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=uuid))

        # https://github.com/psf/requests/issues/4525
        # Requests doesnt yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
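The header handling above (and the matching hunk in the text/JSON processor further down) builds one dict from several sources; because `dict.update()` overwrites shared keys, whichever source is applied last wins. A generic illustration — the variable names are illustrative, not the real datastore calls:

watch_ui_headers = {'User-Agent': 'custom-agent', 'Accept-Language': 'en'}
base_headers = {'Accept-Language': 'de'}          # e.g. global settings / tag overrides
headers_txt = {'Authorization': 'Bearer abc123'}  # e.g. a per-watch headers.txt file

request_headers = dict(watch_ui_headers)
request_headers.update(base_headers)   # shared keys now take the base value
request_headers.update(headers_txt)    # applied last, so it wins any remaining clashes

print(request_headers)
# {'User-Agent': 'custom-agent', 'Accept-Language': 'de', 'Authorization': 'Bearer abc123'}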
@@ -105,7 +110,8 @@ class perform_site_check(difference_detection_processor):
            fetched_md5 = hashlib.md5(fetcher.instock_data.encode('utf-8')).hexdigest()
            # 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold.
            update_obj["in_stock"] = True if fetcher.instock_data == 'Possibly in stock' else False
+        else:
+            raise UnableToExtractRestockData(status_code=fetcher.status_code)

        # The main thing that all this at the moment comes down to :)
        changed_detected = False
@@ -11,17 +11,19 @@ from changedetectionio import content_fetcher, html_tools
 from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
 from copy import deepcopy
 from . import difference_detection_processor
+from ..html_tools import PERL_STYLE_REGEX

 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

 name = 'Webpage Text/HTML, JSON and PDF changes'
 description = 'Detects all text changes where possible'
+
+json_filter_prefixes = ['json:', 'jq:']

 class FilterNotFoundInResponse(ValueError):
     def __init__(self, msg):
         ValueError.__init__(self, msg)


 class PDFToHTMLToolNotFound(ValueError):
     def __init__(self, msg):
         ValueError.__init__(self, msg)
@@ -37,27 +39,13 @@ class perform_site_check(difference_detection_processor):
        super().__init__(*args, **kwargs)
        self.datastore = datastore

-    # Doesn't look like python supports forward slash auto enclosure in re.findall
-    # So convert it to inline flag "foobar(?i)" type configuration
-    def forward_slash_enclosed_regex_to_options(self, regex):
-        res = re.search(r'^/(.*?)/(\w+)$', regex, re.IGNORECASE)
-
-        if res:
-            regex = res.group(1)
-            regex += '(?{})'.format(res.group(2))
-        else:
-            regex += '(?{})'.format('i')
-
-        return regex
-
-    def run(self, uuid, skip_when_checksum_same=True):
+    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
        changed_detected = False
        screenshot = False # as bytes
        stripped_text_from_html = ""

        # DeepCopy so we can be sure we don't accidently change anything by reference
        watch = deepcopy(self.datastore.data['watching'].get(uuid))

        if not watch:
            raise Exception("Watch no longer exists.")

@@ -70,11 +58,10 @@ class perform_site_check(difference_detection_processor):
        # Unset any existing notification error
        update_obj = {'last_notification_error': False, 'last_error': False}

-        extra_headers = watch.get('headers', [])
-
        # Tweak the base config with the per-watch ones
-        request_headers = deepcopy(self.datastore.data['settings']['headers'])
-        request_headers.update(extra_headers)
+        request_headers = watch.get('headers', [])
+        request_headers.update(self.datastore.get_all_base_headers())
+        request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=uuid))

        # https://github.com/psf/requests/issues/4525
        # Requests doesnt yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
@@ -107,7 +94,11 @@ class perform_site_check(difference_detection_processor):
        # If the klass doesnt exist, just use a default
        klass = getattr(content_fetcher, "html_requests")

-        proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
+        if preferred_proxy:
+            proxy_id = preferred_proxy
+        else:
+            proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)

        proxy_url = None
        if proxy_id:
            proxy_url = self.datastore.proxy_list.get(proxy_id).get('url')
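`run()` now accepts an optional `preferred_proxy`, which simply short-circuits the usual per-watch lookup. A reduced sketch of that selection, with a stand-in `lookup_preferred_proxy_for_watch()` in place of the datastore method:

def lookup_preferred_proxy_for_watch(uuid):
    # Stand-in for datastore.get_preferred_proxy_for_watch(); imagine it
    # consults the watch, its tags and the global default.
    return 'proxy-from-settings'

def choose_proxy_id(uuid, preferred_proxy=None):
    # An explicitly requested proxy (e.g. "recheck via this proxy") wins
    if preferred_proxy:
        return preferred_proxy
    return lookup_preferred_proxy_for_watch(uuid)

print(choose_proxy_id('some-uuid'))                               # proxy-from-settings
print(choose_proxy_id('some-uuid', preferred_proxy='squid-two'))  # squid-two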
@@ -133,14 +124,15 @@ class perform_site_check(difference_detection_processor):
        # requests for PDF's, images etc should be passwd the is_binary flag
        is_binary = watch.is_pdf

-        fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'), is_binary=is_binary)
+        fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'),
+                    is_binary=is_binary)
        fetcher.quit()

        self.screenshot = fetcher.screenshot
        self.xpath_data = fetcher.xpath_data

        # Track the content type
-        update_obj['content_type'] = fetcher.headers.get('Content-Type', '')
+        update_obj['content_type'] = fetcher.get_all_headers().get('content-type', '').lower()

        # Watches added automatically in the queue manager will skip if its the same checksum as the previous run
        # Saves a lot of CPU
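Several hunks swap `fetcher.headers.get('Content-Type', '')` for `fetcher.get_all_headers().get('content-type', '').lower()`, i.e. header names are normalised to lowercase once so later checks are case-insensitive. A sketch of that idea (the method name mirrors the diff, but the class here is invented):

class DummyFetcher:
    # Invented stand-in: real fetchers get these from requests/Playwright/puppeteer
    headers = {'Content-Type': 'Application/JSON; charset=utf-8', 'X-Cache': 'HIT'}

    def get_all_headers(self):
        # Lowercase the keys so callers never worry about 'Content-Type' vs 'content-type'
        return {k.lower(): str(v) for k, v in self.headers.items()}

fetcher = DummyFetcher()
is_json = 'application/json' in fetcher.get_all_headers().get('content-type', '').lower()
print(is_json)  # True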
@@ -149,7 +141,6 @@ class perform_site_check(difference_detection_processor):
        if update_obj['previous_md5_before_filters'] == watch.get('previous_md5_before_filters'):
            raise content_fetcher.checksumFromPreviousCheckWasTheSame()

-
        # Fetching complete, now filters
        # @todo move to class / maybe inside of fetcher abstract base?
@@ -160,7 +151,7 @@ class perform_site_check(difference_detection_processor):
        # https://stackoverflow.com/questions/41817578/basic-method-chaining ?
        # return content().textfilter().jsonextract().checksumcompare() ?

-        is_json = 'application/json' in fetcher.headers.get('Content-Type', '')
+        is_json = 'application/json' in fetcher.get_all_headers().get('content-type', '').lower()
        is_html = not is_json

        # source: support, basically treat it as plaintext
@@ -168,7 +159,7 @@ class perform_site_check(difference_detection_processor):
            is_html = False
            is_json = False

-        if watch.is_pdf or 'application/pdf' in fetcher.headers.get('Content-Type', '').lower():
+        if watch.is_pdf or 'application/pdf' in fetcher.get_all_headers().get('content-type', '').lower():
            from shutil import which
            tool = os.getenv("PDF_TO_HTML_TOOL", "pdftohtml")
            if not which(tool):
@@ -192,21 +183,23 @@ class perform_site_check(difference_detection_processor):

            fetcher.content = fetcher.content.replace('</body>', metadata + '</body>')

-        include_filters_rule = deepcopy(watch.get('include_filters', []))
-        # include_filters_rule = watch['include_filters']
-        subtractive_selectors = watch.get(
-            "subtractive_selectors", []
-        ) + self.datastore.data["settings"]["application"].get(
-            "global_subtractive_selectors", []
-        )
+        # Better would be if Watch.model could access the global data also
+        # and then use getattr https://docs.python.org/3/reference/datamodel.html#object.__getitem__
+        # https://realpython.com/inherit-python-dict/ instead of doing it procedurely
+        include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='include_filters')
+        include_filters_rule = [*watch.get('include_filters', []), *include_filters_from_tags]
+
+        subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='subtractive_selectors'),
+                                 *watch.get("subtractive_selectors", []),
+                                 *self.datastore.data["settings"]["application"].get("global_subtractive_selectors", [])
+                                 ]

        # Inject a virtual LD+JSON price tracker rule
        if watch.get('track_ldjson_price_data', '') == PRICE_DATA_TRACK_ACCEPT:
-            include_filters_rule.append(html_tools.LD_JSON_PRODUCT_OFFER_SELECTOR)
+            include_filters_rule += html_tools.LD_JSON_PRODUCT_OFFER_SELECTORS

-        has_filter_rule = include_filters_rule and len("".join(include_filters_rule).strip())
-        has_subtractive_selectors = subtractive_selectors and len(subtractive_selectors[0].strip())
+        has_filter_rule = len(include_filters_rule) and len(include_filters_rule[0].strip())
+        has_subtractive_selectors = len(subtractive_selectors) and len(subtractive_selectors[0].strip())

        if is_json and not has_filter_rule:
            include_filters_rule.append("json:$")
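The filter setup above now merges per-watch CSS/xPath/JSON filters with any filters contributed by the watch's tags, using list unpacking. A small sketch with hypothetical inputs:

watch_include_filters = ['//div[@id="price"]']
filters_from_tags = ['json:$.offers.price', '.product-title']  # e.g. tag "overrides"

# Same shape as the diff: per-watch filters first, then anything the tags add
include_filters_rule = [*watch_include_filters, *filters_from_tags]

has_filter_rule = len(include_filters_rule) and len(include_filters_rule[0].strip())
print(include_filters_rule)
print(bool(has_filter_rule))  # True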
@@ -221,14 +214,11 @@ class perform_site_check(difference_detection_processor):
                pass

        if has_filter_rule:
-            json_filter_prefixes = ['json:', 'jq:']
            for filter in include_filters_rule:
                if any(prefix in filter for prefix in json_filter_prefixes):
                    stripped_text_from_html += html_tools.extract_json_as_string(content=fetcher.content, json_filter=filter)
                    is_html = False

-
-
        if is_html or is_source:

            # CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
@@ -236,7 +226,7 @@ class perform_site_check(difference_detection_processor):
            html_content = fetcher.content

            # If not JSON, and if it's not text/plain..
-            if 'text/plain' in fetcher.headers.get('Content-Type', '').lower():
+            if 'text/plain' in fetcher.get_all_headers().get('content-type', '').lower():
                # Don't run get_text or xpath/css filters on plaintext
                stripped_text_from_html = html_content
            else:
@@ -279,7 +269,6 @@ class perform_site_check(difference_detection_processor):
        # Re #340 - return the content before the 'ignore text' was applied
        text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')

-
        # @todo whitespace coming from missing rtrim()?
        # stripped_text_from_html could be based on their preferences, replace the processed text with only that which they want to know about.
        # Rewrite's the processing text based on only what diff result they want to see
@@ -289,13 +278,13 @@ class perform_site_check(difference_detection_processor):
            # needs to not include (added) etc or it may get used twice
            # Replace the processed text with the preferred result
            rendered_diff = diff.render_diff(previous_version_file_contents=watch.get_last_fetched_before_filters(),
                                             newest_version_file_contents=stripped_text_from_html,
                                             include_equal=False,  # not the same lines
                                             include_added=watch.get('filter_text_added', True),
                                             include_removed=watch.get('filter_text_removed', True),
                                             include_replaced=watch.get('filter_text_replaced', True),
                                             line_feed_sep="\n",
                                             include_change_type_prefix=False)

            watch.save_last_fetched_before_filters(text_content_before_ignored_filter)

@@ -310,7 +299,12 @@ class perform_site_check(difference_detection_processor):
        # Treat pages with no renderable text content as a change? No by default
        empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)
        if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0:
-            raise content_fetcher.ReplyWithContentButNoText(url=url, status_code=fetcher.get_last_status_code(), screenshot=screenshot)
+            raise content_fetcher.ReplyWithContentButNoText(url=url,
+                                                            status_code=fetcher.get_last_status_code(),
+                                                            screenshot=screenshot,
+                                                            has_filters=has_filter_rule,
+                                                            html_content=html_content
+                                                            )

        # We rely on the actual text in the html output.. many sites have random script vars etc,
        # in the future we'll implement other mechanisms.
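`ReplyWithContentButNoText` is now given the filter state and the raw HTML as well, so whatever catches it can explain why there was no text. A generic sketch of an exception carrying that kind of context (field names copied from the call site; the class body is an assumption, the real one lives in content_fetcher):

class ReplyWithContentButNoText(Exception):
    # Assumed shape only — the real class may differ
    def __init__(self, url, status_code, screenshot=None, has_filters=False, html_content=''):
        self.url = url
        self.status_code = status_code
        self.screenshot = screenshot
        self.has_filters = has_filters
        self.html_content = html_content
        super().__init__("Got HTML but no text for {}".format(url))

try:
    raise ReplyWithContentButNoText(url='https://example.com', status_code=200,
                                    has_filters=True, html_content='<html></html>')
except ReplyWithContentButNoText as e:
    # A handler can now hint that the CSS/xPath filter may simply match nothing
    print(e.status_code, e.has_filters)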
@@ -331,16 +325,25 @@ class perform_site_check(difference_detection_processor):
            regex_matched_output = []
            for s_re in extract_text:
                # incase they specified something in '/.../x'
-                regex = self.forward_slash_enclosed_regex_to_options(s_re)
-                result = re.findall(regex.encode('utf-8'), stripped_text_from_html)
+                if re.search(PERL_STYLE_REGEX, s_re, re.IGNORECASE):
+                    regex = html_tools.perl_style_slash_enclosed_regex_to_options(s_re)
+                    result = re.findall(regex.encode('utf-8'), stripped_text_from_html)

-                for l in result:
-                    if type(l) is tuple:
-                        # @todo - some formatter option default (between groups)
-                        regex_matched_output += list(l) + [b'\n']
-                    else:
-                        # @todo - some formatter option default (between each ungrouped result)
-                        regex_matched_output += [l] + [b'\n']
+                    for l in result:
+                        if type(l) is tuple:
+                            # @todo - some formatter option default (between groups)
+                            regex_matched_output += list(l) + [b'\n']
+                        else:
+                            # @todo - some formatter option default (between each ungrouped result)
+                            regex_matched_output += [l] + [b'\n']
+                else:
+                    # Doesnt look like regex, just hunt for plaintext and return that which matches
+                    # `stripped_text_from_html` will be bytes, so we must encode s_re also to bytes
+                    r = re.compile(re.escape(s_re.encode('utf-8')), re.IGNORECASE)
+                    res = r.findall(stripped_text_from_html)
+                    if res:
+                        for match in res:
+                            regex_matched_output += [match] + [b'\n']

            # Now we will only show what the regex matched
            stripped_text_from_html = b''
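The extract-text change above treats `/.../flags` entries as regular expressions and anything else as a literal, case-insensitive string. A rough Python approximation of both branches (the real helpers are `PERL_STYLE_REGEX` and `html_tools.perl_style_slash_enclosed_regex_to_options`; the detection pattern and flag handling below are simplified assumptions):

import re

# Simplified detector for /body/flags style entries (the real PERL_STYLE_REGEX differs)
SLASH_ENCLOSED = re.compile(r'^/(.+)/([a-z]*)$', re.IGNORECASE)

def extract_matches(s_re, text):
    m = SLASH_ENCLOSED.match(s_re)
    if m:
        body, flagchars = m.groups()
        flags = re.IGNORECASE if 'i' in flagchars.lower() else 0
        return re.findall(body.encode('utf-8'), text, flags)
    # Not /.../ syntax: hunt for the plain text, case-insensitively
    r = re.compile(re.escape(s_re.encode('utf-8')), re.IGNORECASE)
    return r.findall(text)

page_text = b"Price: 102.50 EUR\nIn Stock: yes\n"
print(extract_matches(r'/price: [\d\.]+/i', page_text))  # [b'Price: 102.50']
print(extract_matches('in stock', page_text))            # [b'In Stock']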
changedetectionio/res/puppeteer_fetch.js (new file, 185 lines)
@@ -0,0 +1,185 @@
+module.exports = async ({page, context}) => {
+
+    var {
+        url,
+        execute_js,
+        user_agent,
+        extra_wait_ms,
+        req_headers,
+        include_filters,
+        xpath_element_js,
+        screenshot_quality,
+        proxy_username,
+        proxy_password,
+        disk_cache_dir,
+        no_cache_list,
+        block_url_list,
+    } = context;
+
+    await page.setBypassCSP(true)
+    await page.setExtraHTTPHeaders(req_headers);
+    if (user_agent) {
+        await page.setUserAgent(user_agent);
+    }
+    // https://ourcodeworld.com/articles/read/1106/how-to-solve-puppeteer-timeouterror-navigation-timeout-of-30000-ms-exceeded
+
+    await page.setDefaultNavigationTimeout(0);
+
+    if (proxy_username) {
+        await page.authenticate({
+            username: proxy_username,
+            password: proxy_password
+        });
+    }
+
+    await page.setViewport({
+        width: 1024,
+        height: 768,
+        deviceScaleFactor: 1,
+    });
+
+    await page.setRequestInterception(true);
+    if (disk_cache_dir) {
+        console.log(">>>>>>>>>>>>>>> LOCAL DISK CACHE ENABLED <<<<<<<<<<<<<<<<<<<<<");
+    }
+    const fs = require('fs');
+    const crypto = require('crypto');
+
+    function file_is_expired(file_path) {
+        if (!fs.existsSync(file_path)) {
+            return true;
+        }
+        var stats = fs.statSync(file_path);
+        const now_date = new Date();
+        const expire_seconds = 300;
+        if ((now_date / 1000) - (stats.mtime.getTime() / 1000) > expire_seconds) {
+            console.log("CACHE EXPIRED: " + file_path);
+            return true;
+        }
+        return false;
+
+    }
+
+    page.on('request', async (request) => {
+        // General blocking of requests that waste traffic
+        if (block_url_list.some(substring => request.url().toLowerCase().includes(substring))) return request.abort();
+
+        if (disk_cache_dir) {
+            const url = request.url();
+            const key = crypto.createHash('md5').update(url).digest("hex");
+            const dir_path = disk_cache_dir + key.slice(0, 1) + '/' + key.slice(1, 2) + '/' + key.slice(2, 3) + '/';
+
+            // https://stackoverflow.com/questions/4482686/check-synchronously-if-file-directory-exists-in-node-js
+
+            if (fs.existsSync(dir_path + key)) {
+                console.log("* CACHE HIT , using - " + dir_path + key + " - " + url);
+                const cached_data = fs.readFileSync(dir_path + key);
+                // @todo headers can come from dir_path+key+".meta" json file
+                request.respond({
+                    status: 200,
+                    //contentType: 'text/html', //@todo
+                    body: cached_data
+                });
+                return;
+            }
+        }
+        request.continue();
+    });
+
+
+    if (disk_cache_dir) {
+        page.on('response', async (response) => {
+            const url = response.url();
+            // Basic filtering for sane responses
+            if (response.request().method() != 'GET' || response.request().resourceType() == 'xhr' || response.request().resourceType() == 'document' || response.status() != 200) {
+                console.log("Skipping (not useful) - Status:" + response.status() + " Method:" + response.request().method() + " ResourceType:" + response.request().resourceType() + " " + url);
+                return;
+            }
+            if (no_cache_list.some(substring => url.toLowerCase().includes(substring))) {
+                console.log("Skipping (no_cache_list) - " + url);
+                return;
+            }
+            if (url.toLowerCase().includes('data:')) {
+                console.log("Skipping (embedded-data) - " + url);
+                return;
+            }
+            response.buffer().then(buffer => {
+                if (buffer.length > 100) {
+                    console.log("Cache - Saving " + response.request().method() + " - " + url + " - " + response.request().resourceType());
+
+                    const key = crypto.createHash('md5').update(url).digest("hex");
+                    const dir_path = disk_cache_dir + key.slice(0, 1) + '/' + key.slice(1, 2) + '/' + key.slice(2, 3) + '/';
+
+                    if (!fs.existsSync(dir_path)) {
+                        fs.mkdirSync(dir_path, {recursive: true})
+                    }
+
+                    if (fs.existsSync(dir_path + key)) {
+                        if (file_is_expired(dir_path + key)) {
+                            fs.writeFileSync(dir_path + key, buffer);
+                        }
+                    } else {
+                        fs.writeFileSync(dir_path + key, buffer);
+                    }
+                }
+            });
+        });
+    }
+
+    const r = await page.goto(url, {
+        waitUntil: 'load'
+    });
+
+    await page.waitForTimeout(1000);
+    await page.waitForTimeout(extra_wait_ms);
+
+    if (execute_js) {
+        await page.evaluate(execute_js);
+        await page.waitForTimeout(200);
+    }
+
+    var xpath_data;
+    var instock_data;
+    try {
+        // Not sure the best way here, in the future this should be a new package added to npm then run in browserless
+        // (Once the old playwright is removed)
+        xpath_data = await page.evaluate((include_filters) => {%xpath_scrape_code%}, include_filters);
+        instock_data = await page.evaluate(() => {%instock_scrape_code%});
+    } catch (e) {
+        console.log(e);
+    }
+
+    // Protocol error (Page.captureScreenshot): Cannot take screenshot with 0 width can come from a proxy auth failure
+    // Wrap it here (for now)
+
+    var b64s = false;
+    try {
+        b64s = await page.screenshot({encoding: "base64", fullPage: true, quality: screenshot_quality, type: 'jpeg'});
+    } catch (e) {
+        console.log(e);
+    }
+
+    // May fail on very large pages with 'WARNING: tile memory limits exceeded, some content may not draw'
+    if (!b64s) {
+        // @todo after text extract, we can place some overlay text with red background to say 'croppped'
+        console.error('ERROR: content-fetcher page was maybe too large for a screenshot, reverting to viewport only screenshot');
+        try {
+            b64s = await page.screenshot({encoding: "base64", quality: screenshot_quality, type: 'jpeg'});
+        } catch (e) {
+            console.log(e);
+        }
+    }
+
+    var html = await page.content();
+    return {
+        data: {
+            'content': html,
+            'headers': r.headers(),
+            'instock_data': instock_data,
+            'screenshot': b64s,
+            'status_code': r.status(),
+            'xpath_data': xpath_data
+        },
+        type: 'application/json',
+    };
+};
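The optional disk cache in puppeteer_fetch.js keys each response by the MD5 of its URL and shards the files across nested single-character directories, expiring entries after 300 seconds. The same path scheme expressed in Python (the directory argument is just an example; this is only an illustration of the key layout):

import hashlib
import os
import time

EXPIRE_SECONDS = 300

def cache_path(disk_cache_dir, url):
    key = hashlib.md5(url.encode('utf-8')).hexdigest()
    # e.g. key 'a1b2...' -> <dir>/a/1/b/a1b2...
    return os.path.join(disk_cache_dir, key[0], key[1], key[2], key)

def is_expired(file_path):
    if not os.path.exists(file_path):
        return True
    return time.time() - os.path.getmtime(file_path) > EXPIRE_SECONDS

print(cache_path('/tmp/cdio-cache', 'https://example.com/page'))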
@@ -4,35 +4,60 @@ function isItemInStock() {
         '0 in stock',
         'agotado',
         'artikel zurzeit vergriffen',
-        'available for back order',
-        'backordered',
-        'brak w magazynie',
-        'brak na stanie',
-        'coming soon',
-        'currently unavailable',
-        'en rupture de stock',
         'as soon as stock is available',
+        'ausverkauft', // sold out
+        'available for back order',
+        'back-order or out of stock',
+        'backordered',
+        'benachrichtigt mich', // notify me
+        'brak na stanie',
+        'brak w magazynie',
+        'coming soon',
+        'currently have any tickets for this',
+        'currently unavailable',
+        'dostępne wkrótce',
+        'en rupture de stock',
+        'ist derzeit nicht auf lager',
+        'item is no longer available',
         'message if back in stock',
         'nachricht bei',
         'nicht auf lager',
         'nicht lieferbar',
         'nicht zur verfügung',
         'no disponible temporalmente',
+        'no longer in stock',
+        'no tickets available',
         'not available',
+        'not currently available',
         'not in stock',
+        'notify me when available',
+        'não estamos a aceitar encomendas',
         'out of stock',
         'out-of-stock',
-        'não estamos a aceitar encomendas',
         'produkt niedostępny',
-        'no longer in stock',
         'sold out',
         'temporarily out of stock',
         'temporarily unavailable',
+        'tickets unavailable',
+        'unavailable tickets',
         'we do not currently have an estimate of when this product will be back in stock.',
         'zur zeit nicht an lager',
+        '已售完',
     ];
+
+
+    const negateOutOfStockRegexs = [
+        '[0-9] in stock'
+    ]
+    var negateOutOfStockRegexs_r = [];
+    for (let i = 0; i < negateOutOfStockRegexs.length; i++) {
+        negateOutOfStockRegexs_r.push(new RegExp(negateOutOfStockRegexs[0], 'g'));
+    }
+
+
     const elementsWithZeroChildren = Array.from(document.getElementsByTagName('*')).filter(element => element.children.length === 0);
+
+    // REGEXS THAT REALLY MEAN IT'S IN STOCK
     for (let i = elementsWithZeroChildren.length - 1; i >= 0; i--) {
         const element = elementsWithZeroChildren[i];
         if (element.offsetWidth > 0 || element.offsetHeight > 0 || element.getClientRects().length > 0) {
@@ -43,13 +68,39 @@ function isItemInStock() {
                 elementText = element.textContent.toLowerCase();
             }

-            for (const outOfStockText of outOfStockTexts) {
-                if (elementText.includes(outOfStockText)) {
-                    return elementText; // item is out of stock
+            if (elementText.length) {
+                // try which ones could mean its in stock
+                for (let i = 0; i < negateOutOfStockRegexs.length; i++) {
+                    if (negateOutOfStockRegexs_r[i].test(elementText)) {
+                        return 'Possibly in stock';
+                    }
                 }
             }
         }
     }
+
+    // OTHER STUFF THAT COULD BE THAT IT'S OUT OF STOCK
+    for (let i = elementsWithZeroChildren.length - 1; i >= 0; i--) {
+        const element = elementsWithZeroChildren[i];
+        if (element.offsetWidth > 0 || element.offsetHeight > 0 || element.getClientRects().length > 0) {
+            var elementText="";
+            if (element.tagName.toLowerCase() === "input") {
+                elementText = element.value.toLowerCase();
+            } else {
+                elementText = element.textContent.toLowerCase();
+            }
+
+            if (elementText.length) {
+                // and these mean its out of stock
+                for (const outOfStockText of outOfStockTexts) {
+                    if (elementText.includes(outOfStockText)) {
+                        return elementText; // item is out of stock
+                    }
+                }
+            }
+        }
+    }
+
     return 'Possibly in stock'; // possibly in stock, cant decide otherwise.
 }

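After this change the scraper decides in two passes: visible text matching an "n in stock" style pattern returns "Possibly in stock" immediately, only then are the out-of-stock phrases consulted, and if neither pass matches the page is assumed to be in stock. A compact Python restatement of that decision order (phrase list shortened for illustration):

import re

OUT_OF_STOCK_TEXTS = ['out of stock', 'sold out', 'ausverkauft', 'brak w magazynie']
IN_STOCK_PATTERNS = [re.compile(r'[0-9] in stock')]

def stock_verdict(visible_texts):
    lowered = [t.lower() for t in visible_texts]
    # Pass 1: phrases that really mean the item IS purchasable win outright
    for text in lowered:
        if any(p.search(text) for p in IN_STOCK_PATTERNS):
            return 'Possibly in stock'
    # Pass 2: otherwise look for an explicit out-of-stock phrase
    for text in lowered:
        for phrase in OUT_OF_STOCK_TEXTS:
            if phrase in text:
                return text  # item looks out of stock
    return 'Possibly in stock'  # cannot decide otherwise

print(stock_verdict(['Only 3 in stock', 'Add to cart']))  # Possibly in stock
print(stock_verdict(['Sold out', 'Notify me']))           # sold out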
@@ -8,8 +8,14 @@
 // Some pages like https://www.londonstockexchange.com/stock/NCCL/ncondezi-energy-limited/analysis
 // will automatically force a scroll somewhere, so include the position offset
 // Lets hope the position doesnt change while we iterate the bbox's, but this is better than nothing
+var scroll_y = 0;
+try {
+    scroll_y = +document.documentElement.scrollTop || document.body.scrollTop
+} catch (e) {
+    console.log(e);
+}

-var scroll_y=+document.documentElement.scrollTop || document.body.scrollTop

 // Include the getXpath script directly, easier than fetching
 function getxpath(e) {
@@ -38,15 +44,15 @@ const findUpTag = (el) => {
     if (el.name !== undefined && el.name.length) {
         var proposed = el.tagName + "[name=" + el.name + "]";
         var proposed_element = window.document.querySelectorAll(proposed);
-        if(proposed_element.length) {
+        if (proposed_element.length) {
             if (proposed_element.length === 1) {
                 return proposed;
             } else {
                 // Some sites change ID but name= stays the same, we can hit it if we know the index
                 // Find all the elements that match and work out the input[n]
-                var n=Array.from(proposed_element).indexOf(el);
+                var n = Array.from(proposed_element).indexOf(el);
                 // Return a Playwright selector for nthinput[name=zipcode]
-                return proposed+" >> nth="+n;
+                return proposed + " >> nth=" + n;
             }
         }
     }
@@ -2,6 +2,8 @@

 # exit when any command fails
 set -e
+# enable debug
+set -x

 # Test proxy list handling, starting two squids on different ports
 # Each squid adds a different header to the response, which is the main thing we test for.
@@ -19,7 +21,6 @@ docker run --network changedet-network -d \

-
 ## 2nd test actually choose the preferred proxy from proxies.json

 docker run --network changedet-network \
   -v `pwd`/tests/proxy_list/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json \
   test-changedetectionio \
@@ -44,7 +45,6 @@ fi

-
 # Test the UI configurable proxies

 docker run --network changedet-network \
   test-changedetectionio \
   bash -c 'cd changedetectionio && pytest tests/proxy_list/test_select_custom_proxy.py'
@@ -58,4 +58,25 @@ then
   exit 1
 fi

+# Test "no-proxy" option
+docker run --network changedet-network \
+  test-changedetectionio \
+  bash -c 'cd changedetectionio && pytest tests/proxy_list/test_noproxy.py'
+
+# We need to handle grep returning 1
+set +e
+# Check request was never seen in any container
+for c in $(echo "squid-one squid-two squid-custom"); do
+  echo Checking $c
+  docker logs $c &> $c.txt
+  grep noproxy $c.txt
+  if [ $? -ne 1 ]
+  then
+    echo "Saw request for noproxy in $c container"
+    cat $c.txt
+    exit 1
+  fi
+done
+
+
 docker kill squid-one squid-two squid-custom
58
changedetectionio/static/images/brightdata.svg
Normal file
58
changedetectionio/static/images/brightdata.svg
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<svg
|
||||||
|
height="59.553207"
|
||||||
|
viewBox="-0.36 95.21 25.082135 59.553208"
|
||||||
|
width="249.99138"
|
||||||
|
version="1.1"
|
||||||
|
id="svg12"
|
||||||
|
sodipodi:docname="brightdata.svg"
|
||||||
|
inkscape:version="1.1.2 (0a00cf5339, 2022-02-04)"
|
||||||
|
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||||
|
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
xmlns:svg="http://www.w3.org/2000/svg">
|
||||||
|
<defs
|
||||||
|
id="defs16" />
|
||||||
|
<sodipodi:namedview
|
||||||
|
id="namedview14"
|
||||||
|
pagecolor="#ffffff"
|
||||||
|
bordercolor="#666666"
|
||||||
|
borderopacity="1.0"
|
||||||
|
inkscape:pageshadow="2"
|
||||||
|
inkscape:pageopacity="0.0"
|
||||||
|
inkscape:pagecheckerboard="0"
|
||||||
|
showgrid="false"
|
||||||
|
fit-margin-top="0"
|
||||||
|
fit-margin-left="0"
|
||||||
|
fit-margin-right="0"
|
||||||
|
fit-margin-bottom="0"
|
||||||
|
inkscape:zoom="0.9464"
|
||||||
|
inkscape:cx="22.189349"
|
||||||
|
inkscape:cy="-90.870668"
|
||||||
|
inkscape:window-width="1920"
|
||||||
|
inkscape:window-height="1051"
|
||||||
|
inkscape:window-x="1920"
|
||||||
|
inkscape:window-y="0"
|
||||||
|
inkscape:window-maximized="1"
|
||||||
|
inkscape:current-layer="svg12" />
|
||||||
|
<path
|
||||||
|
d="m -34.416031,129.28 c -3.97,-2.43 -5.1,-6.09 -4.32,-10.35 0.81,-4.4 3.95,-6.75 8.04,-7.75 4.23,-1.04 8.44,-0.86 12.3,1.5 0.63,0.39 0.93,0.03 1.31,-0.29 1.5,-1.26 3.27,-1.72 5.189999,-1.83 0.79,-0.05 1.04,0.24 1.01,1.01 -0.05,1.31 -0.04,2.63 0,3.95 0.02,0.65 -0.19,0.93 -0.87,0.89 -0.889999,-0.04 -1.789999,0.03 -2.669999,-0.02 -0.82,-0.04 -1.08,0.1 -0.88,1.04 0.83,3.9 -0.06,7.37 -3.1,10.06 -2.76,2.44 -6.13,3.15 -9.72,3.04 -0.51,-0.02 -1.03,-0.02 -1.52,-0.13 -1.22,-0.25 -1.96,0.14 -2.19,1.41 -0.28,1.54 0.16,2.62 1.37,3.07 0.84,0.31 1.74,0.35 2.63,0.39 2.97,0.13 5.95,-0.18 8.91,0.21 2.93,0.39 5.69,1.16 6.85,4.25 1.269999,3.38 0.809999,6.62 -1.48,9.47 -2.73,3.39 -6.52,4.78 -10.66,5.33 -3.53,0.48 -7.04,0.27 -10.39,-1.11 -3.89,-1.6 -5.75,-4.95 -4.84,-8.72 0.51,-2.11 1.85,-3.58 3.69,-4.65 0.38,-0.22 0.93,-0.32 0.28,-0.96 -2.91,-2.83 -2.85,-6.16 0.1,-8.95 0.28,-0.26 0.6,-0.53 0.96,-0.86 z m 8.07,21.5 c 0.95,0.04 1.87,-0.13 2.78,-0.33 1.89,-0.42 3.51,-1.3 4.49,-3.06 1.82,-3.25 0.24,-6.2 -3.37,-6.58 -2.88,-0.3 -5.76,0.24 -8.63,-0.13 -0.53,-0.07 -0.75,0.34 -0.95,0.71 -1.16,2.24 -1.08,4.53 0,6.73 1.15,2.34 3.46,2.48 5.68,2.66 z m -5,-30.61 c -0.03,1.67 0.08,3.19 0.74,4.61 0.76,1.62 2.17,2.42 4.03,2.31 1.62,-0.1 2.9,-1.12 3.36,-2.84 0.66,-2.46 0.69,-4.95 0.01,-7.42 -0.49,-1.76 -1.7,-2.64 -3.56,-2.7 -2.08,-0.07 -3.37,0.7 -4.04,2.42 -0.47,1.21 -0.6,2.47 -0.54,3.62 z m 32.9399993,6.56 c 0,2.59 0.05,5.18 -0.02,7.77 -0.03,1.03 0.31,1.46 1.32,1.52 0.65,0.04 1.61,-0.09 1.82,0.57 0.26,0.81 0.11,1.76 0.06,2.65 -0.03,0.48 -0.81,0.39 -0.81,0.39 l -11.47,0.01 c 0,0 -0.95,-0.21 -0.88,-0.88 0.03,-0.29 0.04,-0.6 0,-0.89 -0.19,-1.24 0.21,-1.92 1.58,-1.9 0.99,0.01 1.28,-0.52 1.28,-1.53 -0.05,-8.75 -0.05,-17.49 0,-26.24 0.01,-1.15 -0.36,-1.62 -1.44,-1.67 -0.17,-0.01 -0.34,-0.04 -0.5,-0.07 -1.43,-0.22 -2.12,-1.57 -1.53,-2.91 0.15,-0.35 0.43,-0.36 0.72,-0.4 2.94,-0.41 5.88,-0.81 8.82000002,-1.23 0.81999998,-0.12 0.99999998,0.27 0.98999998,1.01 -0.02,3.35 0,6.71 0.02,10.06 0,0.35 -0.23,0.84 0.18,1.03 0.38,0.17 0.69,-0.25 0.99,-0.45 2.56,-1.74 5.33,-2.73 8.4900007,-2.56 3.51005,0.19 5.65005,1.95 6.35005,5.46 0.42,2.09 0.52,4.21 0.51,6.33 -0.02,3.86 0.05,7.73 -0.04,11.59 -0.02,1.12 0.37,1.5 1.39,1.6 0.61,0.05 1.55,-0.13 1.74,0.47 0.26,0.85 0.12,1.84 0.1,2.77 -0.01,0.41 -0.69,0.37 -0.69,0.37 l -11.4700504,0.01 c 0,0 -0.81,-0.29 -0.8,-0.85 0.01,-0.38 0.04,-0.77 -0.01,-1.15 -0.13,-1.01 0.32,-1.52 1.31,-1.56 1.0600004,-0.05 1.3800004,-0.55 1.3500004,-1.63 -0.14,-4.84 0.16,-9.68 -0.18,-14.51 -0.26,-3.66 -2.1100004,-4.95 -5.6700007,-3.99 -0.25,0.07 -0.49,0.15 -0.73,0.22 -2.57,0.8 -2.79,1.09 -2.79,3.71 0.01,2.3 0.01,4.59 0.01,6.88 z M -109.26603,122.56 c 0,-4.75 -0.02,-9.51 0.02,-14.26 0.01,-0.92 -0.17,-1.47 -1.19,-1.45 -0.16,0 -0.33,-0.07 -0.5,-0.1 -1.56,-0.27 -2.24,-1.47 -1.69,-2.92 0.14,-0.37 0.41,-0.38 0.7,-0.42 2.98,-0.41 5.97,-0.81 8.94,-1.24 0.85,-0.12 0.88,0.33 0.88,0.96 -0.01,3.01 -0.01,6.03 0,9.04 0,0.4 -0.18,0.96 0.27,1.16 0.36,0.16 0.66,-0.3 0.96,-0.52 4.729999,-3.51 12.459999,-2.61 14.889999,4.48 1.89,5.51 1.91,11.06 -0.96,16.28 -2.37,4.31 -6.19,6.49 -11.15,6.59 -3.379999,0.07 -6.679999,-0.3 -9.909999,-1.37 -0.93,-0.31 -1.3,-0.78 -1.28,-1.83 0.05,-4.81 0.02,-9.6 0.02,-14.4 z m 7.15,3.89 c 0,2.76 0.02,5.52 -0.01,8.28 -0.01,0.76 0.18,1.29 0.91,1.64 1.899999,0.9 4.299999,0.5 5.759999,-1.01 0.97,-1 1.56,-2.21 1.96,-3.52 1.03,-3.36 0.97,-6.78 0.61,-10.22 a 9.991,9.991 0 0 0 -0.93,-3.29 c -1.47,-3.06 -4.67,-3.85 -7.439999,-1.86 -0.6,0.43 -0.88,0.93 -0.87,1.7 0.04,2.76 0.01,5.52 0.01,8.28 z"
|
||||||
|
fill="#4280f6"
|
||||||
|
id="path2" />
|
||||||
|
<path
|
||||||
|
d="m 68.644019,137.2 c -1.62,1.46 -3.41,2.56 -5.62,2.96 -4.4,0.8 -8.7,-1.39 -10.49,-5.49 -2.31,-5.31 -2.3,-10.67 -0.1,-15.98 2.31,-5.58 8.29,-8.65 14.24,-7.46 1.71,0.34 1.9,0.18 1.9,-1.55 0,-0.68 -0.05,-1.36 0.01,-2.04 0.09,-1.02 -0.25,-1.54 -1.34,-1.43 -0.64,0.06 -1.26,-0.1 -1.88,-0.21 -1.32,-0.24 -1.6,-0.62 -1.37,-1.97 0.07,-0.41 0.25,-0.57 0.65,-0.62 2.63,-0.33 5.27,-0.66 7.9,-1.02 1.04,-0.14 1.17,0.37 1.17,1.25 -0.02,10.23 -0.02,20.45 -0.01,30.68 v 1.02 c 0.02,0.99 0.35,1.6 1.52,1.47 0.52,-0.06 1.35,-0.27 1.25,0.73 -0.08,0.8 0.58,1.93 -0.94,2.18 -1.29,0.22 -2.51,0.69 -3.86,0.65 -2.04,-0.06 -2.3,-0.23 -2.76,-2.19 -0.09,-0.3 0.06,-0.67 -0.27,-0.98 z m -0.07,-12.46 c 0,-2.8 -0.04,-5.6 0.02,-8.39 0.02,-0.9 -0.28,-1.47 -1.05,-1.81 -3.18,-1.4 -7.54,-0.8 -9.3,2.87 -0.83,1.74 -1.31,3.54 -1.49,5.46 -0.28,2.93 -0.38,5.83 0.61,8.65 0.73,2.09 1.81,3.9 4.11,4.67 2.49,0.83 4.55,-0.04 6.5,-1.48 0.54,-0.4 0.62,-0.95 0.61,-1.57 -0.02,-2.8 -0.01,-5.6 -0.01,-8.4 z m 28.79,2.53 c 0,3.24 0.04,5.83 -0.02,8.41 -0.02,1 0.19,1.49 1.309998,1.41 0.55,-0.04 1.460003,-0.46 1.520003,0.73 0.05,1.02 0.1,1.89 -1.330003,2.08 -1.289998,0.17 -2.559998,0.51 -3.889998,0.48 -1.88,-0.05 -2.15,-0.26 -2.42,-2.15 -0.04,-0.27 0.14,-0.65 -0.22,-0.79 -0.34,-0.13 -0.5,0.24 -0.72,0.42 -3.61,3 -8.15,3.4 -11.64,1.08 -1.61,-1.07 -2.49,-2.63 -2.67,-4.43 -0.51,-5.13 0.77,-7.91 6.3,-10.22 2.44,-1.02 5.07,-1.27 7.68,-1.49 0.77,-0.07 1.03,-0.28 1.02,-1.05 -0.03,-1.48 -0.05,-2.94 -0.64,-4.36 -0.59,-1.42 -1.67,-1.92 -3.08,-2.03 -3.04,-0.24 -5.88,0.5 -8.63,1.71 -0.51,0.23 -1.19,0.75 -1.48,-0.13 -0.26,-0.77 -1.35,-1.61 0.05,-2.47 3.27,-2 6.7,-3.44 10.61,-3.42 1.44,0.01 2.88,0.27 4.21,0.81 2.67,1.08 3.44,3.4 3.8,5.99 0.46,3.37 0.1,6.73 0.24,9.42 z m -5.09,2.9 c 0,-1.23 -0.01,-2.46 0,-3.69 0,-0.52 -0.06,-0.98 -0.75,-0.84 -1.45,0.3 -2.93,0.28 -4.37,0.69 -3.71,1.04 -5.46,4.48 -3.97,8.03 0.51,1.22 1.48,1.98 2.79,2.16 2.01,0.28 3.86,-0.29 5.6,-1.28 0.54,-0.31 0.73,-0.76 0.72,-1.37 -0.05,-1.23 -0.02,-2.47 -0.02,-3.7 z m 43.060001,-2.89 c 0,2.72 0.01,5.43 -0.01,8.15 0,0.66 0.02,1.21 0.91,1.12 0.54,-0.06 0.99,0.12 0.86,0.75 -0.15,0.71 0.56,1.7 -0.58,2.09 -1.55,0.52 -3.16,0.59 -4.77,0.4 -0.99,-0.12 -1.12,-1.01 -1.18,-1.73 -0.08,-1.15 -0.16,-1.45 -1.24,-0.54 -3.41,2.87 -8.05,3.17 -11.43,0.88 -1.75,-1.18 -2.49,-2.91 -2.7,-4.94 -0.64,-6.24 3.16,-8.74 7.83,-10.17 2.04,-0.62 4.14,-0.8 6.24,-0.99 0.81,-0.07 1,-0.36 0.98,-1.09 -0.04,-1.31 0.04,-2.62 -0.42,-3.89 -0.57,-1.57 -1.53,-2.34 -3.18,-2.45 -3.03,-0.21 -5.88,0.46 -8.64,1.66 -0.6,0.26 -1.25,0.81 -1.68,-0.2 -0.34,-0.8 -1.08,-1.61 0.16,-2.36 4.12,-2.5 8.44,-4.16 13.36,-3.07 3.21,0.71 4.89,2.91 5.26,6.34 0.18,1.69 0.22,3.37 0.22,5.07 0.01,1.66 0.01,3.32 0.01,4.97 z m -5.09,2.54 c 0,-1.27 -0.03,-2.54 0.01,-3.81 0.02,-0.74 -0.27,-1.02 -0.98,-0.92 -1.21,0.17 -2.43,0.28 -3.62,0.55 -3.72,0.83 -5.47,3.48 -4.82,7.21 0.29,1.66 1.57,2.94 3.21,3.16 2.02,0.27 3.85,-0.34 5.57,-1.34 0.49,-0.29 0.64,-0.73 0.63,-1.29 -0.02,-1.18 0,-2.37 0,-3.56 z"
|
||||||
|
fill="#c8dbfb"
|
||||||
|
id="path4" />
|
||||||
|
<path
|
||||||
|
d="m 26.314019,125.77 c 0,-2.89 -0.05,-5.77 0.02,-8.66 0.03,-1.04 -0.33,-1.39 -1.31,-1.24 a 0.7,0.7 0 0 1 -0.25,0 c -0.57,-0.18 -1.44,0.48 -1.68,-0.58 -0.35,-1.48 -0.02,-2.3 1.21,-2.7 1.3,-0.43 2.16,-1.26 2.76,-2.46 0.78,-1.56 1.44,-3.17 1.91,-4.84 0.18,-0.63 0.47,-0.86 1.15,-0.88 3.28,-0.09 3.27,-0.11 3.32,3.17 0.01,1.06 0.09,2.12 0.09,3.18 -0.01,0.67 0.27,0.89 0.91,0.88 1.61,-0.02 3.23,0.03 4.84,-0.02 0.77,-0.02 1.01,0.23 1.03,1.01 0.08,3.27 0.1,3.27 -3.09,3.27 -0.93,0 -1.87,0.03 -2.8,-0.01 -0.67,-0.02 -0.89,0.26 -0.88,0.91 0.04,5.43 0.04,10.86 0.12,16.29 0.02,1.7 0.75,2.26 2.46,2.1 1.1,-0.1 2.19,-0.26 3.23,-0.65 0.59,-0.22 0.89,-0.09 1.14,0.53 0.93,2.29 0.92,2.37 -1.32,3.52 -2.54,1.3 -5.22,1.99 -8.1,1.79 -2.27,-0.16 -3.68,-1.27 -4.35,-3.45 -0.3,-0.98 -0.41,-1.99 -0.41,-3.01 z m -97.67005,-8.99 c 0.57,-0.84 1.11,-1.74 1.76,-2.55 1.68,-2.09 3.68,-3.62 6.54,-3.66 1.08,-0.01 1.63,0.28 1.57,1.52 -0.1,2.08 -0.05,4.16 -0.02,6.24 0.01,0.74 -0.17,0.96 -0.96,0.76 -2.36,-0.59 -4.71,-0.42 -7.03,0.28 -0.8,0.24 -1.16,0.62 -1.15,1.52 0.05,4.5 0.04,9 0,13.5 -0.01,0.89 0.29,1.16 1.15,1.2 1.23,0.06 2.44,0.32 3.67,0.39 0.75,0.05 0.91,0.38 0.89,1.04 -0.06,2.86 0.29,2.28 -2.25,2.3 -4.2,0.04 -8.41,-0.02 -12.61,0.03 -0.91,0.01 -1.39,-0.18 -1.22,-1.18 0.02,-0.12 0,-0.25 0,-0.38 0.02,-2.1 -0.24,-1.88 1.77,-2.04 1.33,-0.11 1.6,-0.67 1.58,-1.9 -0.07,-5.35 -0.04,-10.7 -0.02,-16.05 0,-0.78 -0.17,-1.2 -1,-1.46 -2.21,-0.68 -2.7,-1.69 -2.22,-3.99 0.11,-0.52 0.45,-0.56 0.82,-0.62 2.22,-0.34 4.44,-0.7 6.67,-0.99 0.99,-0.13 1.82,0.7 1.84,1.76 0.03,1.4 0.03,2.8 0.04,4.2 -0.01,0.02 0.06,0.04 0.18,0.08 z m 25.24,6.59 c 0,3.69 0.04,7.38 -0.03,11.07 -0.02,1.04 0.31,1.48 1.32,1.49 0.29,0 0.59,0.12 0.88,0.13 0.93,0.01 1.18,0.47 1.16,1.37 -0.05,2.19 0,2.19 -2.24,2.19 -3.48,0 -6.96,-0.04 -10.44,0.03 -1.09,0.02 -1.47,-0.33 -1.3,-1.36 0.02,-0.12 0.02,-0.26 0,-0.38 -0.28,-1.39 0.39,-1.96 1.7,-1.9 1.36,0.06 1.76,-0.51 1.74,-1.88 -0.09,-5.17 -0.08,-10.35 0,-15.53 0.02,-1.22 -0.32,-1.87 -1.52,-2.17 -0.57,-0.14 -1.47,-0.11 -1.57,-0.85 -0.15,-1.04 -0.05,-2.11 0.01,-3.17 0.02,-0.34 0.44,-0.35 0.73,-0.39 2.81,-0.39 5.63,-0.77 8.44,-1.18 0.92,-0.14 1.15,0.2 1.14,1.09 -0.04,3.8 -0.02,7.62 -0.02,11.44 z"
|
||||||
|
fill="#4280f6"
|
||||||
|
id="path6" />
|
||||||
|
<path
|
||||||
|
d="m 101.44402,125.64 c 0,-3.18 -0.03,-6.37 0.02,-9.55 0.02,-0.94 -0.26,-1.36 -1.22,-1.22 -0.21,0.03 -0.430003,0.04 -0.630003,0 -0.51,-0.12 -1.35,0.39 -1.44,-0.55 -0.08,-0.85 -0.429998,-1.87 0.93,-2.24 2.080003,-0.57 2.720003,-2.39 3.350003,-4.17 0.31,-0.88 0.62,-1.76 0.87,-2.66 0.18,-0.64 0.52,-0.85 1.19,-0.84 2.46,0.05 2,-0.15 2.04,2.04 0.02,1.1 0.08,2.21 -0.02,3.31 -0.11,1.16 0.46,1.52 1.46,1.53 1.78,0.01 3.57,0.04 5.35,-0.01 0.82,-0.02 1.12,0.23 1.11,1.08 -0.05,2.86 0.19,2.49 -2.42,2.51 -1.53,0.01 -3.06,0.02 -4.59,-0.01 -0.65,-0.01 -0.9,0.22 -0.9,0.89 0.02,5.52 0,11.04 0.03,16.56 0,0.67 0.14,1.34 0.25,2.01 0.17,1.04 1.17,1.62 2.59,1.42 1.29,-0.19 2.57,-0.49 3.86,-0.69 0.43,-0.07 1.05,-0.47 1.19,0.4 0.12,0.75 1.05,1.61 -0.09,2.24 -2.09,1.16 -4.28,2.07 -6.71,2.16 -1.05,0.04 -2.13,0.2 -3.16,-0.14 -1.92,-0.65 -3.03,-2.28 -3.05,-4.51 -0.02,-3.19 -0.01,-6.37 -0.01,-9.56 z"
fill="#c8dbfb"
id="path8" />
<path
d="m -50.816031,95.21 c 0.19,2.160002 1.85,3.240002 2.82,4.740002 0.25,0.379998 0.48,0.109998 0.67,-0.16 0.21,-0.31 0.6,-1.21 1.15,-1.28 -0.35,1.38 -0.04,3.149998 0.16,4.449998 0.49,3.05 -1.22,5.64 -4.07,6.18 -3.38,0.65 -6.22,-2.21 -5.6,-5.62 0.23,-1.24 1.37,-2.5 0.77,-3.699998 -0.85,-1.7 0.54,-0.52 0.79,-0.22 1.04,1.199998 1.21,0.09 1.45,-0.55 0.24,-0.63 0.31,-1.31 0.47,-1.97 0.19,-0.770002 0.55,-1.400002 1.39,-1.870002 z"
fill="#4280f6"
id="path10" />
</svg>
changedetectionio/static/images/email.svg (new file, 37 lines)
@@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->

<svg
   fill="#FFFFFF"
   height="7.5005589"
   width="11.248507"
   version="1.1"
   id="Layer_1"
   viewBox="0 0 7.1975545 4.7993639"
   xml:space="preserve"
   xmlns="http://www.w3.org/2000/svg"
   xmlns:svg="http://www.w3.org/2000/svg"><defs
   id="defs19" />
<g
   id="g14"
   transform="matrix(-0.01406065,0,0,0.01406065,7.1975543,-1.1990922)">
  <g
     id="g12">
    <g
       id="g10">
      <path
         d="M 468.373,85.28 H 45.333 C 21.227,85.28 0,105.76 0,129.014 V 383.2 c 0,23.147 21.227,43.413 45.333,43.413 h 422.933 c 23.68,0 43.627,-19.84 43.627,-43.413 V 129.014 C 512,105.334 492.053,85.28 468.373,85.28 Z m 0,320 H 45.333 c -12.373,0 -24,-10.773 -24,-22.08 V 129.014 c 0,-11.307 11.84,-22.4 24,-22.4 h 422.933 c 11.733,0 22.293,10.667 22.293,22.4 V 383.2 h 0.107 c 10e-4,11.734 -10.453,22.08 -22.293,22.08 z"
         id="path2" />
      <path
         d="m 440.853,153.974 c -3.307,-4.907 -9.92,-6.187 -14.827,-2.987 L 256,264.48 85.973,151.094 c -4.907,-3.2 -11.52,-1.707 -14.72,3.2 -3.093,4.8 -1.813,11.307 2.88,14.507 l 176,117.333 c 3.627,2.347 8.213,2.347 11.84,0 l 176,-117.333 c 4.8,-3.201 6.187,-9.921 2.88,-14.827 z"
         id="path4" />
      <path
         d="m 143.573,257.654 c -0.107,0.107 -0.32,0.213 -0.427,0.32 L 68.48,311.307 c -4.907,3.307 -6.187,9.92 -2.88,14.827 3.307,4.907 9.92,6.187 14.827,2.88 0.107,-0.107 0.32,-0.213 0.427,-0.32 l 74.667,-53.333 c 4.907,-3.307 6.187,-9.92 2.88,-14.827 -3.308,-4.907 -9.921,-6.187 -14.828,-2.88 z"
         id="path6" />
      <path
         d="m 443.947,311.627 c -0.107,-0.107 -0.32,-0.213 -0.427,-0.32 l -74.667,-53.333 c -4.693,-3.52 -11.413,-2.56 -14.933,2.133 -3.52,4.693 -2.56,11.413 2.133,14.933 0.107,0.107 0.32,0.213 0.427,0.32 l 74.667,53.333 c 4.693,3.52 11.413,2.56 14.933,-2.133 3.52,-4.693 2.56,-11.413 -2.133,-14.933 z"
         id="path8" />
    </g>
  </g>
</g>
</svg>
changedetectionio/static/images/generic-icon.svg (new file, 3 lines)
@@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg width="61.649mm" height="61.649mm" version="1.1" viewBox="0 0 61.649 61.649" xml:space="preserve" xmlns="http://www.w3.org/2000/svg"><g transform="translate(66.269 -15.463)" fill="#3056d3"><g transform="matrix(1.423 0 0 1.423 101.16 69.23)" fill="#3056d3"><g transform="matrix(.8229 0 0 .8229 -23.378 -2.3935)" fill="#3056d3"><path d="m-88.248-43.007a26.323 26.323 0 0 0-26.323 26.323 26.323 26.323 0 0 0 26.323 26.323 26.323 26.323 0 0 0 26.323-26.323 26.323 26.323 0 0 0-26.323-26.323zm0 2.8417a23.482 23.482 0 0 1 23.482 23.482 23.482 23.482 0 0 1-23.482 23.482 23.482 23.482 0 0 1-23.482-23.482 23.482 23.482 0 0 1 23.482-23.482z"/><g transform="matrix(.26458 0 0 .26458 -115.65 -44.085)"><path d="m33.02 64.43c0.35-0.05 2.04-0.13 2.04-0.13h25.53s3.17 0.32 3.67 0.53c2.5 1.05 3.98 1.89 6.04 3.57 0.72 0.58 4.12 4.01 4.12 4.01l51.67 57.39s1.61 1.65 1.97 1.94c1.2 0.97 2.48 1.96 3.98 2.32 0.5 0.12 2.72 0.21 2.72 0.21h27.32l-8.83-9.04s-1.31-1.65-1.44-1.94c-0.45-0.93-0.59-2.59-0.13-3.51 0.35-0.69 1.46-1.87 2.23-1.98 1.03-0.14 2.12-0.39 3.02 0.14 0.33 0.2 1.64 1.32 1.64 1.32l17.49 17.49s1.35 1.09 1.6 1.6c0.17 0.34 0.29 0.82 0.15 1.18-0.17 0.42-1.42 1.63-1.42 1.63l-0.94 0.98-15.69 16.37s-1.44 1.4-1.79 1.67c-0.76 0.6-1.99 0.89-2.96 0.9-1.03 0-2.62-1.11-3.26-1.91-0.6-0.76-1.1-2.22-0.77-3.13 0.16-0.45 1.28-1.85 1.28-1.85l11.36-11.3-29.47-0.02-1.68 0.09s-4.16-0.66-5.26-1.03c-1.63-0.56-3.44-1.82-4.75-2.93-0.39-0.33-1.8-1.92-1.8-1.92l-51.7-59.28s-2-2.06-2.43-2.43c-1.37-1.17-2-1.62-3.76-2.34-0.44-0.18-3.45-0.55-3.45-0.55l-24.13-0.22s-2.23-0.15-2.61-0.22c-1.08-0.21-2.16-1.07-2.81-1.83-0.79-0.92-0.59-3.06 0.06-4.09 0.57-0.89 2.14-1.52 3.19-1.66z"/><path d="m86.1 109.7-17.13 19.65s-2 2.06-2.43 2.43c-1.37 1.17-2 1.62-3.76 2.34-0.44 0.18-3.45 0.55-3.45 0.55l-24.13 0.22s-2.23 0.15-2.61 0.22c-1.08 0.21-2.16 1.07-2.81 1.83-0.79 0.92-0.59 3.06 0.06 4.09 0.57 0.89 2.14 1.52 3.19 1.66 0.35 0.05 2.04 0.13 2.04 0.13h25.53s3.17-0.32 3.67-0.53c2.5-1.05 3.98-1.89 6.04-3.57 0.72-0.58 4.12-4.01 4.12-4.01l17.38-19.3z"/><path d="m177.81 67.6c-0.17-0.42-1.42-1.63-1.42-1.63l-0.94-0.98-15.69-16.37s-1.44-1.4-1.79-1.67c-0.76-0.6-1.99-0.89-2.96-0.9-1.03 0-2.62 1.11-3.26 1.91-0.6 0.76-1.1 2.22-0.77 3.13 0.16 0.45 1.28 1.85 1.28 1.85l11.36 11.3-29.47 0.02-1.68-0.09s-4.16 0.66-5.26 1.03c-1.63 0.56-3.44 1.82-4.75 2.93-0.39 0.33-1.8 1.92-1.8 1.92l-18.91 21.69 5.98 5.98 18.38-20.41s1.61-1.65 1.97-1.94c1.2-0.97 2.48-1.96 3.98-2.32 0.5-0.12 2.72-0.21 2.72-0.21h27.32l-8.83 9.04s-1.31 1.65-1.44 1.94c-0.45 0.93-0.59 2.59-0.13 3.51 0.35 0.69 1.46 1.87 2.23 1.98 1.03 0.14 2.12 0.39 3.02-0.14 0.33-0.2 1.64-1.32 1.64-1.32l17.49-17.49s1.35-1.09 1.6-1.6c0.17-0.34 0.29-0.82 0.15-1.18z"/></g></g></g></g></svg>
changedetectionio/static/images/oxylabs.svg (new file, 57 lines)
File diff suppressed because one or more lines are too long
@@ -114,11 +114,11 @@ $(document).ready(function () {
         e.preventDefault()
     });
 
+    // When the mouse moves we know which element it should be above
+    // mousedown will link that to the UI (select the right action, highlight etc)
     $('#browsersteps-selector-canvas').bind('mousedown', function (e) {
         // https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent
         e.preventDefault()
-        console.log(e);
-        console.log("current xpath in index is " + current_selected_i);
         last_click_xy = {'x': parseInt((1 / x_scale) * e.offsetX), 'y': parseInt((1 / y_scale) * e.offsetY)}
         process_selected(current_selected_i);
         current_selected_i = false;
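A quick aside on the coordinate maths kept by this hunk: the canvas shows a scaled-down screenshot, so the click offset is divided by the scale factor to map it back into page coordinates. A tiny sketch of that mapping; the concrete numbers and the definition of x_scale/y_scale here are illustrative assumptions, not taken from the code:

# Illustrative only - assumes x_scale/y_scale are canvas-size / page-size ratios.
x_scale = 640 / 1280    # canvas is half the real page width
y_scale = 360 / 720
offset_x, offset_y = 320, 90          # where the user clicked on the canvas
last_click_xy = {'x': int((1 / x_scale) * offset_x),
                 'y': int((1 / y_scale) * offset_y)}
print(last_click_xy)                  # {'x': 640, 'y': 180}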
@@ -132,6 +132,7 @@ $(document).ready(function () {
         }
     });
 
+    // Debounce and find the current most 'interesting' element we are hovering above
    $('#browsersteps-selector-canvas').bind('mousemove', function (e) {
        if (!xpath_data) {
            return;
@@ -151,41 +152,40 @@ $(document).ready(function () {
         current_selected_i = false;
         // Reverse order - the most specific one should be deeper/"laster"
         // Basically, find the most 'deepest'
-        //$('#browsersteps-selector-canvas').css('cursor', 'pointer');
-        for (var i = xpath_data['size_pos'].length; i !== 0; i--) {
-            // draw all of them? let them choose somehow?
-            var sel = xpath_data['size_pos'][i - 1];
+        var possible_elements = [];
+        xpath_data['size_pos'].forEach(function (item, index) {
             // If we are in a bounding-box
-            if (e.offsetY > sel.top * y_scale && e.offsetY < sel.top * y_scale + sel.height * y_scale
+            if (e.offsetY > item.top * y_scale && e.offsetY < item.top * y_scale + item.height * y_scale
                 &&
-                e.offsetX > sel.left * y_scale && e.offsetX < sel.left * y_scale + sel.width * y_scale
+                e.offsetX > item.left * y_scale && e.offsetX < item.left * y_scale + item.width * y_scale
 
             ) {
-                // Only highlight these interesting types
-                if (1) {
-                    ctx.strokeRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
-                    ctx.fillRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
-                    current_selected_i = i - 1;
-                    break;
-
-                    // find the smallest one at this x,y
-                    // does it mean sort the xpath list by size (w*h) i think so!
-                } else {
-
-                    if (include_text_elements[0].checked === true) {
-                        // blue one with background instead?
-                        ctx.fillStyle = 'rgba(0,0,255, 0.1)';
-                        ctx.strokeStyle = 'rgba(0,0,200, 0.7)';
-                        $('#browsersteps-selector-canvas').css('cursor', 'grab');
-                        ctx.strokeRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
-                        ctx.fillRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
-                        current_selected_i = i - 1;
-                        break;
-                    }
-                }
+                // There could be many elements here, record them all and then we'll find out which is the most 'useful'
+                // (input, textarea, button, A etc)
+                if (item.width < xpath_data['browser_width']) {
+                    possible_elements.push(item);
+                }
             }
         }
+        });
+
+        // Find the best one
+        if (possible_elements.length) {
+            possible_elements.forEach(function (item, index) {
+                if (["a", "input", "textarea", "button"].includes(item['tagName'])) {
+                    current_selected_i = item;
+                }
+            });
+
+            if (!current_selected_i) {
+                current_selected_i = possible_elements[0];
+            }
+
+            sel = xpath_data['size_pos'][current_selected_i];
+            ctx.strokeRect(current_selected_i.left * x_scale, current_selected_i.top * y_scale, current_selected_i.width * x_scale, current_selected_i.height * y_scale);
+            ctx.fillRect(current_selected_i.left * x_scale, current_selected_i.top * y_scale, current_selected_i.width * x_scale, current_selected_i.height * y_scale);
        }
 
 
    }.debounce(10));
 });
 
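The rewritten mousemove handler above first collects every candidate bounding box under the cursor (skipping page-wide wrappers), then prefers an interactive tag and otherwise takes the first hit. A standalone sketch of that selection rule in plain Python, for illustration only; names are invented and this is not code from the project:

def pick_best_element(size_pos, x, y, browser_width):
    # Keep every element whose box contains the point, ignoring anything
    # as wide as the whole page (body/wrapper elements).
    possible = [el for el in size_pos
                if el['left'] <= x <= el['left'] + el['width']
                and el['top'] <= y <= el['top'] + el['height']
                and el['width'] < browser_width]
    if not possible:
        return None
    # Prefer clearly interactive tags (the last match wins, as in the JS above),
    # otherwise fall back to the first candidate.
    best = None
    for el in possible:
        if el['tagName'] in ('a', 'input', 'textarea', 'button'):
            best = el
    return best or possible[0]

elements = [
    {'left': 0,   'top': 0,   'width': 1280, 'height': 900, 'tagName': 'body'},
    {'left': 100, 'top': 200, 'width': 300,  'height': 40,  'tagName': 'div'},
    {'left': 110, 'top': 205, 'width': 120,  'height': 30,  'tagName': 'button'},
]
print(pick_best_element(elements, x=150, y=220, browser_width=1280))  # -> the <button>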
@@ -195,45 +195,37 @@ $(document).ready(function () {
 
 
     // callback for clicking on an xpath on the canvas
-    function process_selected(xpath_data_index) {
+    function process_selected(selected_in_xpath_list) {
         found_something = false;
         var first_available = $("ul#browser_steps li.empty").first();
 
 
-        if (xpath_data_index !== false) {
+        if (selected_in_xpath_list !== false) {
             // Nothing focused, so fill in a new one
             // if inpt type button or <button>
             // from the top, find the next not used one and use it
-            var x = xpath_data['size_pos'][xpath_data_index];
+            var x = selected_in_xpath_list;
             console.log(x);
             if (x && first_available.length) {
                 // @todo will it let you click shit that has a layer ontop? probably not.
-                if (x['tagtype'] === 'text' || x['tagtype'] === 'email' || x['tagName'] === 'textarea' || x['tagtype'] === 'password' || x['tagtype'] === 'search') {
+                if (x['tagtype'] === 'text' || x['tagtype'] === 'number' || x['tagtype'] === 'email' || x['tagName'] === 'textarea' || x['tagtype'] === 'password' || x['tagtype'] === 'search') {
                     $('select', first_available).val('Enter text in field').change();
                     $('input[type=text]', first_available).first().val(x['xpath']);
                     $('input[placeholder="Value"]', first_available).addClass('ok').click().focus();
                     found_something = true;
                 } else {
-                    if (x['isClickable'] || x['tagName'].startsWith('h') || x['tagName'] === 'a' || x['tagName'] === 'button' || x['tagtype'] === 'submit' || x['tagtype'] === 'checkbox' || x['tagtype'] === 'radio' || x['tagtype'] === 'li') {
+                    // There's no good way (that I know) to find if this
+                    // see https://stackoverflow.com/questions/446892/how-to-find-event-listeners-on-a-dom-node-in-javascript-or-in-debugging
+                    // https://codepen.io/azaslavsky/pen/DEJVWv
+
+                    // So we dont know if its really a clickable element or not :-(
+                    // Assume it is - then we dont fill the pages with unreliable "Click X,Y" selections
+                    // If you switch to "Click X,y" after an element here is setup, it will give the last co-ords anyway
+                    //if (x['isClickable'] || x['tagName'].startsWith('h') || x['tagName'] === 'a' || x['tagName'] === 'button' || x['tagtype'] === 'submit' || x['tagtype'] === 'checkbox' || x['tagtype'] === 'radio' || x['tagtype'] === 'li') {
                         $('select', first_available).val('Click element').change();
                         $('input[type=text]', first_available).first().val(x['xpath']);
                         found_something = true;
-                    }
+                    //}
                 }
-            }
-
-            first_available.xpath_data_index = xpath_data_index;
-
-            if (!found_something) {
-                if (include_text_elements[0].checked === true) {
-                    // Suggest that we use as filter?
-                    // @todo filters should always be in the last steps, nothing non-filter after it
-                    found_something = true;
-                    ctx.strokeStyle = 'rgba(0,0,255, 0.9)';
-                    ctx.fillStyle = 'rgba(0,0,255, 0.1)';
-                    $('select', first_available).val('Extract text and use as filter').change();
-                    $('input[type=text]', first_available).first().val(x['xpath']);
-                    include_text_elements[0].checked = false;
-                }
             }
         }
     }
@@ -248,7 +240,7 @@ $(document).ready(function () {
 
     function start() {
         console.log("Starting browser-steps UI");
-        browsersteps_session_id = Date.now();
+        browsersteps_session_id = false;
         // @todo This setting of the first one should be done at the datalayer but wtforms doesnt wanna play nice
         $('#browser_steps >li:first-child').removeClass('empty');
         set_first_gotosite_disabled();
@@ -256,7 +248,7 @@ $(document).ready(function () {
         $('.clear,.remove', $('#browser_steps >li:first-child')).hide();
         $.ajax({
             type: "GET",
-            url: browser_steps_sync_url + "&browsersteps_session_id=" + browsersteps_session_id,
+            url: browser_steps_start_url,
             statusCode: {
                 400: function () {
                     // More than likely the CSRF token was lost when the server restarted
@@ -264,12 +256,12 @@ $(document).ready(function () {
                 }
             }
         }).done(function (data) {
-            xpath_data = data.xpath_data;
             $("#loading-status-text").fadeIn();
+            browsersteps_session_id = data.browsersteps_session_id;
             // This should trigger 'Goto site'
             console.log("Got startup response, requesting Goto-Site (first) step fake click");
             $('#browser_steps >li:first-child .apply').click();
-            browserless_seconds_remaining = data.browser_time_remaining;
+            browserless_seconds_remaining = 500;
             set_first_gotosite_disabled();
         }).fail(function (data) {
             console.log(data);
@@ -430,7 +422,6 @@ $(document).ready(function () {
         apply_buttons_disabled = false;
         $("#browsersteps-img").css('opacity', 1);
         $('ul#browser_steps li .control .apply').css('opacity', 1);
-        browserless_seconds_remaining = data.browser_time_remaining;
         $("#loading-status-text").hide();
         set_first_gotosite_disabled();
     }).fail(function (data) {
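Note the behavioural change in start() above: the client no longer invents browsersteps_session_id with Date.now(); it calls browser_steps_start_url and reads the id back from the JSON response. Purely for orientation, a hypothetical Flask endpoint with the response shape the JS consumes; the route path and values are assumptions, not the project's actual blueprint code:

import time
from flask import Flask, jsonify

app = Flask(__name__)

@app.route("/browser-steps/start")
def browser_steps_start():
    # Server allocates the session id and reports the time budget;
    # the JS stores data.browsersteps_session_id and currently hard-codes
    # browserless_seconds_remaining = 500 instead of data.browser_time_remaining.
    return jsonify({
        "browsersteps_session_id": str(int(time.time() * 1000)),
        "browser_time_remaining": 500,
    })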
@@ -1,4 +1,13 @@
 $(document).ready(function () {
+    var csrftoken = $('input[name=csrf_token]').val();
+    $.ajaxSetup({
+        beforeSend: function (xhr, settings) {
+            if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
+                xhr.setRequestHeader("X-CSRFToken", csrftoken)
+            }
+        }
+    })
+
     // Load it when the #screenshot tab is in use, so we dont give a slow experience when waiting for the text diff to load
     window.addEventListener('hashchange', function (e) {
         toggle(location.hash);
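The $.ajaxSetup() hook above copies the form's hidden csrf_token into an X-CSRFToken header on every non-GET AJAX call. On the Flask side this pairs naturally with Flask-WTF's CSRFProtect, which by default also accepts the token from that header; a minimal sketch with invented route names, not the project's actual app wiring:

from flask import Flask, render_template_string
from flask_wtf.csrf import CSRFProtect

app = Flask(__name__)
app.config["SECRET_KEY"] = "change-me"
CSRFProtect(app)

@app.route("/")
def index():
    # This hidden field is what $('input[name=csrf_token]').val() picks up.
    return render_template_string(
        '<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">')

@app.route("/ignore-text", methods=["POST"])
def ignore_text():
    # Without a matching token (form field or X-CSRFToken header) this returns 400,
    # which is exactly the case the JS error handlers warn about.
    return "ok"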
@@ -15,11 +24,71 @@ $(document).ready(function () {
|
|||||||
$("#settings").hide();
|
$("#settings").hide();
|
||||||
} else if (hash_name === '#extract') {
|
} else if (hash_name === '#extract') {
|
||||||
$("#settings").hide();
|
$("#settings").hide();
|
||||||
}
|
} else {
|
||||||
|
|
||||||
|
|
||||||
else {
|
|
||||||
$("#settings").show();
|
$("#settings").show();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const article = $('.highlightable-filter')[0];
|
||||||
|
|
||||||
|
// We could also add the 'touchend' event for touch devices, but since
|
||||||
|
// most iOS/Android browsers already show a dialog when you select
|
||||||
|
// text (often with a Share option) we'll skip that
|
||||||
|
article.addEventListener('mouseup', dragTextHandler, false);
|
||||||
|
article.addEventListener('mousedown', clean, false);
|
||||||
|
|
||||||
|
function clean(event) {
|
||||||
|
$("#highlightSnippet").remove();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function dragTextHandler(event) {
|
||||||
|
console.log('mouseupped');
|
||||||
|
|
||||||
|
// Check if any text was selected
|
||||||
|
if (window.getSelection().toString().length > 0) {
|
||||||
|
|
||||||
|
// Find out how much (if any) user has scrolled
|
||||||
|
var scrollTop = (window.pageYOffset !== undefined) ? window.pageYOffset : (document.documentElement || document.body.parentNode || document.body).scrollTop;
|
||||||
|
|
||||||
|
// Get cursor position
|
||||||
|
const posX = event.clientX;
|
||||||
|
const posY = event.clientY + 20 + scrollTop;
|
||||||
|
|
||||||
|
// Append HTML to the body, create the "Tweet Selection" dialog
|
||||||
|
document.body.insertAdjacentHTML('beforeend', '<div id="highlightSnippet" style="position: absolute; top: ' + posY + 'px; left: ' + posX + 'px;"><div class="pure-form-message-inline" style="font-size: 70%">Ignore any change on any line which contains the selected text.</div><br><a data-mode="exact" href="javascript:void(0);" class="pure-button button-secondary button-xsmall">Ignore exact text</a> </div>');
|
||||||
|
|
||||||
|
if (/\d/.test(window.getSelection().toString())) {
|
||||||
|
// Offer regex replacement
|
||||||
|
document.getElementById("highlightSnippet").insertAdjacentHTML('beforeend', '<a data-mode="digit-regex" href="javascript:void(0);" class="pure-button button-secondary button-xsmall">Ignore text including number changes</a>');
|
||||||
|
}
|
||||||
|
|
||||||
|
$('#highlightSnippet a').bind('click', function (e) {
|
||||||
|
if(!window.getSelection().toString().trim().length) {
|
||||||
|
alert('Oops no text selected!');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
$.ajax({
|
||||||
|
type: "POST",
|
||||||
|
url: highlight_submit_ignore_url,
|
||||||
|
data: {'mode': $(this).data('mode'), 'selection': window.getSelection().toString()},
|
||||||
|
statusCode: {
|
||||||
|
400: function () {
|
||||||
|
// More than likely the CSRF token was lost when the server restarted
|
||||||
|
alert("There was a problem processing the request, please reload the page.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}).done(function (data) {
|
||||||
|
$("#highlightSnippet").html(data)
|
||||||
|
}).fail(function (data) {
|
||||||
|
console.log(data);
|
||||||
|
alert('There was an error communicating with the server.');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -32,5 +32,10 @@ $(document).ready(function () {
         window.getSelection().removeAllRanges();
 
     });
 
+    $("#notification-token-toggle").click(function (e) {
+        e.preventDefault();
+        $('#notification-tokens-info').toggle();
+    });
+
 });
@@ -26,9 +26,6 @@ $(document).ready(function() {
     data = {
         window_url : window.location.href,
         notification_urls : $('.notification-urls').val(),
-        notification_title : $('.notification-title').val(),
-        notification_body : $('.notification-body').val(),
-        notification_format : $('.notification-format').val(),
     }
     for (key in data) {
         if (!data[key].length) {
changedetectionio/static/js/recheck-proxy.js (new file, 87 lines)
@@ -0,0 +1,87 @@
|
$(function () {
|
||||||
|
/* add container before each proxy location to show status */
|
||||||
|
|
||||||
|
var option_li = $('.fetch-backend-proxy li').filter(function() {
|
||||||
|
return $("input",this)[0].value.length >0;
|
||||||
|
});
|
||||||
|
|
||||||
|
//var option_li = $('.fetch-backend-proxy li');
|
||||||
|
var isActive = false;
|
||||||
|
$(option_li).prepend('<div class="proxy-status"></div>');
|
||||||
|
$(option_li).append('<div class="proxy-timing"></div><div class="proxy-check-details"></div>');
|
||||||
|
|
||||||
|
function set_proxy_check_status(proxy_key, state) {
|
||||||
|
// select input by value name
|
||||||
|
const proxy_li = $('input[value="' + proxy_key + '" ]').parent();
|
||||||
|
if (state['status'] === 'RUNNING') {
|
||||||
|
$('.proxy-status', proxy_li).html('<span class="spinner"></span>');
|
||||||
|
}
|
||||||
|
if (state['status'] === 'OK') {
|
||||||
|
$('.proxy-status', proxy_li).html('<span style="color: green; font-weight: bold" >OK</span>');
|
||||||
|
$('.proxy-check-details', proxy_li).html(state['text']);
|
||||||
|
}
|
||||||
|
if (state['status'] === 'ERROR' || state['status'] === 'ERROR OTHER') {
|
||||||
|
$('.proxy-status', proxy_li).html('<span style="color: red; font-weight: bold" >X</span>');
|
||||||
|
$('.proxy-check-details', proxy_li).html(state['text']);
|
||||||
|
}
|
||||||
|
$('.proxy-timing', proxy_li).html(state['time']);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function pollServer() {
|
||||||
|
if (isActive) {
|
||||||
|
window.setTimeout(function () {
|
||||||
|
$.ajax({
|
||||||
|
url: proxy_recheck_status_url,
|
||||||
|
success: function (data) {
|
||||||
|
var all_done = true;
|
||||||
|
$.each(data, function (proxy_key, state) {
|
||||||
|
set_proxy_check_status(proxy_key, state);
|
||||||
|
if (state['status'] === 'RUNNING') {
|
||||||
|
all_done = false;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (all_done) {
|
||||||
|
console.log("Shutting down poller, all done.")
|
||||||
|
isActive = false;
|
||||||
|
} else {
|
||||||
|
pollServer();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
error: function () {
|
||||||
|
//ERROR HANDLING
|
||||||
|
pollServer();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}, 2000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$('#check-all-proxies').click(function (e) {
|
||||||
|
e.preventDefault()
|
||||||
|
$('body').addClass('proxy-check-active');
|
||||||
|
$('.proxy-check-details').html('');
|
||||||
|
$('.proxy-status').html('<span class="spinner"></span>').fadeIn();
|
||||||
|
$('.proxy-timing').html('');
|
||||||
|
|
||||||
|
// Request start, needs CSRF?
|
||||||
|
$.ajax({
|
||||||
|
type: "GET",
|
||||||
|
url: recheck_proxy_start_url,
|
||||||
|
}).done(function (data) {
|
||||||
|
$.each(data, function (proxy_key, state) {
|
||||||
|
set_proxy_check_status(proxy_key, state['status'])
|
||||||
|
});
|
||||||
|
isActive = true;
|
||||||
|
pollServer();
|
||||||
|
|
||||||
|
}).fail(function (data) {
|
||||||
|
console.log(data);
|
||||||
|
alert('There was an error communicating with the server.');
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
@@ -12,7 +12,7 @@ window.addEventListener('hashchange', function () {
     var has_errors = document.querySelectorAll(".messages .error");
     if (!has_errors.length) {
         if (document.location.hash == "") {
-            document.querySelector(".tabs ul li:first-child a").click();
+            location.replace(document.querySelector(".tabs ul li:first-child a").hash);
         } else {
             set_active_tab();
         }
@@ -3,22 +3,45 @@
|
|||||||
* Toggles theme between light and dark mode.
|
* Toggles theme between light and dark mode.
|
||||||
*/
|
*/
|
||||||
$(document).ready(function () {
|
$(document).ready(function () {
|
||||||
const button = document.getElementsByClassName("toggle-theme")[0];
|
const button = document.getElementById("toggle-light-mode");
|
||||||
|
|
||||||
button.onclick = () => {
|
button.onclick = () => {
|
||||||
const htmlElement = document.getElementsByTagName("html");
|
const htmlElement = document.getElementsByTagName("html");
|
||||||
const isDarkMode = htmlElement[0].dataset.darkmode === "true";
|
const isDarkMode = htmlElement[0].dataset.darkmode === "true";
|
||||||
htmlElement[0].dataset.darkmode = !isDarkMode;
|
htmlElement[0].dataset.darkmode = !isDarkMode;
|
||||||
if (isDarkMode) {
|
setCookieValue(!isDarkMode);
|
||||||
button.classList.remove("dark");
|
|
||||||
setCookieValue(false);
|
|
||||||
} else {
|
|
||||||
button.classList.add("dark");
|
|
||||||
setCookieValue(true);
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const setCookieValue = (value) => {
|
const setCookieValue = (value) => {
|
||||||
document.cookie = `css_dark_mode=${value};max-age=31536000;path=/`
|
document.cookie = `css_dark_mode=${value};max-age=31536000;path=/`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Search input box behaviour
|
||||||
|
const toggle_search = document.getElementById("toggle-search");
|
||||||
|
const search_q = document.getElementById("search-q");
|
||||||
|
window.addEventListener('keydown', function (e) {
|
||||||
|
|
||||||
|
if (e.altKey == true && e.keyCode == 83)
|
||||||
|
search_q.classList.toggle('expanded');
|
||||||
|
search_q.focus();
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
search_q.onkeydown = (e) => {
|
||||||
|
var key = e.keyCode || e.which;
|
||||||
|
if (key === 13) {
|
||||||
|
document.searchForm.submit();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
toggle_search.onclick = () => {
|
||||||
|
// Could be that they want to search something once text is in there
|
||||||
|
if (search_q.value.length) {
|
||||||
|
document.searchForm.submit();
|
||||||
|
} else {
|
||||||
|
// If not..
|
||||||
|
search_q.classList.toggle('expanded');
|
||||||
|
search_q.focus();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -61,7 +61,12 @@ $(document).ready(function () {
|
|||||||
function bootstrap_visualselector() {
|
function bootstrap_visualselector() {
|
||||||
if (1) {
|
if (1) {
|
||||||
// bootstrap it, this will trigger everything else
|
// bootstrap it, this will trigger everything else
|
||||||
$("img#selector-background").bind('load', function () {
|
$("img#selector-background").on("error", function () {
|
||||||
|
$('.fetching-update-notice').html("<strong>Ooops!</strong> The VisualSelector tool needs atleast one fetched page, please unpause the watch and/or wait for the watch to complete fetching and then reload this page.");
|
||||||
|
$('.fetching-update-notice').css('color','#bb0000');
|
||||||
|
$('#selector-current-xpath').hide();
|
||||||
|
$('#clear-selector').hide();
|
||||||
|
}).bind('load', function () {
|
||||||
console.log("Loaded background...");
|
console.log("Loaded background...");
|
||||||
c = document.getElementById("selector-canvas");
|
c = document.getElementById("selector-canvas");
|
||||||
// greyed out fill context
|
// greyed out fill context
|
||||||
@@ -79,10 +84,11 @@ $(document).ready(function () {
|
|||||||
}).attr("src", screenshot_url);
|
}).attr("src", screenshot_url);
|
||||||
}
|
}
|
||||||
// Tell visualSelector that the image should update
|
// Tell visualSelector that the image should update
|
||||||
var s = $("img#selector-background").attr('src')+"?"+ new Date().getTime();
|
var s = $("img#selector-background").attr('src') + "?" + new Date().getTime();
|
||||||
$("img#selector-background").attr('src',s)
|
$("img#selector-background").attr('src', s)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// This is fired once the img src is loaded in bootstrap_visualselector()
|
||||||
function fetch_data() {
|
function fetch_data() {
|
||||||
// Image is ready
|
// Image is ready
|
||||||
$('.fetching-update-notice').html("Fetching element data..");
|
$('.fetching-update-notice').html("Fetching element data..");
|
||||||
@@ -99,7 +105,8 @@ $(document).ready(function () {
|
|||||||
reflow_selector();
|
reflow_selector();
|
||||||
$('.fetching-update-notice').fadeOut();
|
$('.fetching-update-notice').fadeOut();
|
||||||
});
|
});
|
||||||
};
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
function set_scale() {
|
function set_scale() {
|
||||||
|
|||||||
@@ -1,31 +1,45 @@
|
|||||||
$(function () {
|
$(function () {
|
||||||
// Remove unviewed status when normally clicked
|
// Remove unviewed status when normally clicked
|
||||||
$('.diff-link').click(function () {
|
$('.diff-link').click(function () {
|
||||||
$(this).closest('.unviewed').removeClass('unviewed');
|
$(this).closest('.unviewed').removeClass('unviewed');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
$("#checkbox-assign-tag").click(function (e) {
|
||||||
|
$('#op_extradata').val(prompt("Enter a tag name"));
|
||||||
|
});
|
||||||
|
|
||||||
$('.with-share-link > *').click(function () {
|
$('.with-share-link > *').click(function () {
|
||||||
$("#copied-clipboard").remove();
|
$("#copied-clipboard").remove();
|
||||||
|
|
||||||
var range = document.createRange();
|
var range = document.createRange();
|
||||||
var n=$("#share-link")[0];
|
var n = $("#share-link")[0];
|
||||||
range.selectNode(n);
|
range.selectNode(n);
|
||||||
window.getSelection().removeAllRanges();
|
window.getSelection().removeAllRanges();
|
||||||
window.getSelection().addRange(range);
|
window.getSelection().addRange(range);
|
||||||
document.execCommand("copy");
|
document.execCommand("copy");
|
||||||
window.getSelection().removeAllRanges();
|
window.getSelection().removeAllRanges();
|
||||||
|
|
||||||
$('.with-share-link').append('<span style="font-size: 80%; color: #fff;" id="copied-clipboard">Copied to clipboard</span>');
|
$('.with-share-link').append('<span style="font-size: 80%; color: #fff;" id="copied-clipboard">Copied to clipboard</span>');
|
||||||
$("#copied-clipboard").fadeOut(2500, function() {
|
$("#copied-clipboard").fadeOut(2500, function () {
|
||||||
$(this).remove();
|
$(this).remove();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
$(".watch-table tr").click(function (event) {
|
||||||
|
var tagName = event.target.tagName.toLowerCase();
|
||||||
|
if (tagName === 'tr' || tagName === 'td') {
|
||||||
|
var x = $('input[type=checkbox]', this);
|
||||||
|
if (x) {
|
||||||
|
$(x).click();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
// checkboxes - check all
|
// checkboxes - check all
|
||||||
$("#check-all").click(function (e) {
|
$("#check-all").click(function (e) {
|
||||||
$('input[type=checkbox]').not(this).prop('checked', this.checked);
|
$('input[type=checkbox]').not(this).prop('checked', this.checked);
|
||||||
});
|
});
|
||||||
|
|
||||||
// checkboxes - show/hide buttons
|
// checkboxes - show/hide buttons
|
||||||
$("input[type=checkbox]").click(function (e) {
|
$("input[type=checkbox]").click(function (e) {
|
||||||
if ($('input[type=checkbox]:checked').length) {
|
if ($('input[type=checkbox]:checked').length) {
|
||||||
|
|||||||
@@ -42,4 +42,8 @@ $(document).ready(function () {
|
|||||||
$('#notification_urls').val('');
|
$('#notification_urls').val('');
|
||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
});
|
});
|
||||||
|
$("#notification-token-toggle").click(function (e) {
|
||||||
|
e.preventDefault();
|
||||||
|
$('#notification-tokens-info').toggle();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -218,3 +218,10 @@ td#diff-col div {
|
|||||||
text-align: center; }
|
text-align: center; }
|
||||||
.tab-pane-inner#screenshot img {
|
.tab-pane-inner#screenshot img {
|
||||||
max-width: 99%; }
|
max-width: 99%; }
|
||||||
|
|
||||||
|
#highlightSnippet {
|
||||||
|
background: var(--color-background);
|
||||||
|
padding: 1em;
|
||||||
|
border-radius: 5px;
|
||||||
|
background: var(--color-background);
|
||||||
|
box-shadow: 1px 1px 4px var(--color-shadow-jump); }
|
||||||
|
|||||||
@@ -119,3 +119,11 @@ td#diff-col div {
|
|||||||
max-width: 99%;
|
max-width: 99%;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#highlightSnippet {
|
||||||
|
background: var(--color-background);
|
||||||
|
padding: 1em;
|
||||||
|
border-radius: 5px;
|
||||||
|
background: var(--color-background);
|
||||||
|
box-shadow: 1px 1px 4px var(--color-shadow-jump);
|
||||||
|
}
|
||||||
|
|||||||
@@ -44,7 +44,7 @@
|
|||||||
#browser-steps .flex-wrapper {
|
#browser-steps .flex-wrapper {
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-flow: row;
|
flex-flow: row;
|
||||||
height: 600px; /*@todo make this dynamic */
|
height: 70vh;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* this is duplicate :( */
|
/* this is duplicate :( */
|
||||||
|
|||||||
25
changedetectionio/static/styles/scss/parts/_darkmode.scss
Normal file
25
changedetectionio/static/styles/scss/parts/_darkmode.scss
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
|
||||||
|
#toggle-light-mode {
|
||||||
|
width: 3rem;
|
||||||
|
/* default */
|
||||||
|
.icon-dark {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
html[data-darkmode="true"] {
|
||||||
|
#toggle-light-mode {
|
||||||
|
.icon-light {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.icon-dark {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -7,6 +7,7 @@ ul#requests-extra_proxies {
|
|||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* each proxy entry is a `table` */
|
/* each proxy entry is a `table` */
|
||||||
table {
|
table {
|
||||||
tr {
|
tr {
|
||||||
@@ -15,3 +16,47 @@ ul#requests-extra_proxies {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#request {
|
||||||
|
/* Auto proxy scan/checker */
|
||||||
|
label[for=proxy] {
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
body.proxy-check-active {
|
||||||
|
#request {
|
||||||
|
.proxy-status {
|
||||||
|
width: 2em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.proxy-check-details {
|
||||||
|
font-size: 80%;
|
||||||
|
color: #555;
|
||||||
|
display: block;
|
||||||
|
padding-left: 4em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.proxy-timing {
|
||||||
|
font-size: 80%;
|
||||||
|
padding-left: 1rem;
|
||||||
|
color: var(--color-link);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#recommended-proxy {
|
||||||
|
display: grid;
|
||||||
|
gap: 2rem;
|
||||||
|
@media (min-width: 991px) {
|
||||||
|
grid-template-columns: repeat(2, 1fr);
|
||||||
|
}
|
||||||
|
|
||||||
|
> div {
|
||||||
|
border: 1px #aaa solid;
|
||||||
|
border-radius: 4px;
|
||||||
|
padding: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
padding-bottom: 1em;
|
||||||
|
}
|
||||||
|
|||||||
37
changedetectionio/static/styles/scss/parts/_pagination.scss
Normal file
37
changedetectionio/static/styles/scss/parts/_pagination.scss
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
.pagination-page-info {
|
||||||
|
color: #fff;
|
||||||
|
font-size: 0.85rem;
|
||||||
|
text-transform: capitalize;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pagination.menu {
|
||||||
|
> * {
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
li {
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
a {
|
||||||
|
padding: 0.65rem;
|
||||||
|
margin: 3px;
|
||||||
|
border: none;
|
||||||
|
background: #444;
|
||||||
|
border-radius: 2px;
|
||||||
|
color: var(--color-text-button);
|
||||||
|
&.disabled {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
&.active {
|
||||||
|
font-weight: bold;
|
||||||
|
background: #888;
|
||||||
|
}
|
||||||
|
|
||||||
|
&:hover {
|
||||||
|
background: #999;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
@@ -5,8 +5,10 @@
|
|||||||
@import "parts/_arrows";
|
@import "parts/_arrows";
|
||||||
@import "parts/_browser-steps";
|
@import "parts/_browser-steps";
|
||||||
@import "parts/_extra_proxies";
|
@import "parts/_extra_proxies";
|
||||||
|
@import "parts/_pagination";
|
||||||
@import "parts/_spinners";
|
@import "parts/_spinners";
|
||||||
@import "parts/_variables";
|
@import "parts/_variables";
|
||||||
|
@import "parts/_darkmode";
|
||||||
|
|
||||||
body {
|
body {
|
||||||
color: var(--color-text);
|
color: var(--color-text);
|
||||||
@@ -53,8 +55,31 @@ a.github-link {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
button.toggle-theme {
|
|
||||||
width: 4rem;
|
#toggle-search {
|
||||||
|
width: 2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
#search-q {
|
||||||
|
opacity: 0;
|
||||||
|
-webkit-transition: all .9s ease;
|
||||||
|
-moz-transition: all .9s ease;
|
||||||
|
transition: all .9s ease;
|
||||||
|
width: 0;
|
||||||
|
display: none;
|
||||||
|
&.expanded {
|
||||||
|
width: auto;
|
||||||
|
display: inline-block;
|
||||||
|
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#search-result-info {
|
||||||
|
color: #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
button.toggle-button {
|
||||||
|
vertical-align: middle;
|
||||||
background: transparent;
|
background: transparent;
|
||||||
border: none;
|
border: none;
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
@@ -73,19 +98,7 @@ button.toggle-theme {
|
|||||||
display: block;
|
display: block;
|
||||||
}
|
}
|
||||||
|
|
||||||
.icon-dark {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
&.dark {
|
|
||||||
.icon-light {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
.icon-dark {
|
|
||||||
display: block;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.pure-menu-horizontal {
|
.pure-menu-horizontal {
|
||||||
|
|||||||
@@ -50,8 +50,7 @@
|
|||||||
#browser-steps .flex-wrapper {
|
#browser-steps .flex-wrapper {
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-flow: row;
|
flex-flow: row;
|
||||||
height: 600px;
|
height: 70vh; }
|
||||||
/*@todo make this dynamic */ }
|
|
||||||
|
|
||||||
/* this is duplicate :( */
|
/* this is duplicate :( */
|
||||||
#browsersteps-selector-wrapper {
|
#browsersteps-selector-wrapper {
|
||||||
@@ -95,6 +94,63 @@ ul#requests-extra_proxies {
|
|||||||
ul#requests-extra_proxies table tr {
|
ul#requests-extra_proxies table tr {
|
||||||
display: inline; }
|
display: inline; }
|
||||||
|
|
||||||
|
#request {
|
||||||
|
/* Auto proxy scan/checker */ }
|
||||||
|
#request label[for=proxy] {
|
||||||
|
display: inline-block; }
|
||||||
|
|
||||||
|
body.proxy-check-active #request .proxy-status {
|
||||||
|
width: 2em; }
|
||||||
|
|
||||||
|
body.proxy-check-active #request .proxy-check-details {
|
||||||
|
font-size: 80%;
|
||||||
|
color: #555;
|
||||||
|
display: block;
|
||||||
|
padding-left: 4em; }
|
||||||
|
|
||||||
|
body.proxy-check-active #request .proxy-timing {
|
||||||
|
font-size: 80%;
|
||||||
|
padding-left: 1rem;
|
||||||
|
color: var(--color-link); }
|
||||||
|
|
||||||
|
#recommended-proxy {
|
||||||
|
display: grid;
|
||||||
|
gap: 2rem;
|
||||||
|
padding-bottom: 1em; }
|
||||||
|
@media (min-width: 991px) {
|
||||||
|
#recommended-proxy {
|
||||||
|
grid-template-columns: repeat(2, 1fr); } }
|
||||||
|
#recommended-proxy > div {
|
||||||
|
border: 1px #aaa solid;
|
||||||
|
border-radius: 4px;
|
||||||
|
padding: 1em; }
|
||||||
|
|
||||||
|
.pagination-page-info {
|
||||||
|
color: #fff;
|
||||||
|
font-size: 0.85rem;
|
||||||
|
text-transform: capitalize; }
|
||||||
|
|
||||||
|
.pagination.menu > * {
|
||||||
|
display: inline-block; }
|
||||||
|
|
||||||
|
.pagination.menu li {
|
||||||
|
display: inline-block; }
|
||||||
|
|
||||||
|
.pagination.menu a {
|
||||||
|
padding: 0.65rem;
|
||||||
|
margin: 3px;
|
||||||
|
border: none;
|
||||||
|
background: #444;
|
||||||
|
border-radius: 2px;
|
||||||
|
color: var(--color-text-button); }
|
||||||
|
.pagination.menu a.disabled {
|
||||||
|
display: none; }
|
||||||
|
.pagination.menu a.active {
|
||||||
|
font-weight: bold;
|
||||||
|
background: #888; }
|
||||||
|
.pagination.menu a:hover {
|
||||||
|
background: #999; }
|
||||||
|
|
||||||
/* spinner */
|
/* spinner */
|
||||||
.spinner,
|
.spinner,
|
||||||
.spinner:after {
|
.spinner:after {
|
||||||
@@ -271,6 +327,18 @@ html[data-darkmode="true"] {
|
|||||||
html[data-darkmode="true"] .watch-table .unviewed.error {
|
html[data-darkmode="true"] .watch-table .unviewed.error {
|
||||||
color: var(--color-watch-table-error); }
|
color: var(--color-watch-table-error); }
|
||||||
|
|
||||||
|
#toggle-light-mode {
|
||||||
|
width: 3rem;
|
||||||
|
/* default */ }
|
||||||
|
#toggle-light-mode .icon-dark {
|
||||||
|
display: none; }
|
||||||
|
|
||||||
|
html[data-darkmode="true"] #toggle-light-mode .icon-light {
|
||||||
|
display: none; }
|
||||||
|
|
||||||
|
html[data-darkmode="true"] #toggle-light-mode .icon-dark {
|
||||||
|
display: block; }
|
||||||
|
|
||||||
body {
|
body {
|
||||||
color: var(--color-text);
|
color: var(--color-text);
|
||||||
background: var(--color-background-page); }
|
background: var(--color-background-page); }
|
||||||
@@ -305,23 +373,35 @@ a.github-link {
|
|||||||
a.github-link:hover {
|
a.github-link:hover {
|
||||||
color: var(--color-icon-github-hover); }
|
color: var(--color-icon-github-hover); }
|
||||||
|
|
||||||
button.toggle-theme {
|
#toggle-search {
|
||||||
width: 4rem;
|
width: 2rem; }
|
||||||
|
|
||||||
|
#search-q {
|
||||||
|
opacity: 0;
|
||||||
|
-webkit-transition: all .9s ease;
|
||||||
|
-moz-transition: all .9s ease;
|
||||||
|
transition: all .9s ease;
|
||||||
|
width: 0;
|
||||||
|
display: none; }
|
||||||
|
#search-q.expanded {
|
||||||
|
width: auto;
|
||||||
|
display: inline-block;
|
||||||
|
opacity: 1; }
|
||||||
|
|
||||||
|
#search-result-info {
|
||||||
|
color: #fff; }
|
||||||
|
|
||||||
|
button.toggle-button {
|
||||||
|
vertical-align: middle;
|
||||||
background: transparent;
|
background: transparent;
|
||||||
border: none;
|
border: none;
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
color: var(--color-icon-github); }
|
color: var(--color-icon-github); }
|
||||||
button.toggle-theme:hover {
|
button.toggle-button:hover {
|
||||||
color: var(--color-icon-github-hover); }
|
color: var(--color-icon-github-hover); }
|
||||||
button.toggle-theme svg {
|
button.toggle-button svg {
|
||||||
fill: currentColor; }
|
fill: currentColor; }
|
||||||
button.toggle-theme .icon-light {
|
button.toggle-button .icon-light {
|
||||||
display: block; }
|
|
||||||
button.toggle-theme .icon-dark {
|
|
||||||
display: none; }
|
|
||||||
button.toggle-theme.dark .icon-light {
|
|
||||||
display: none; }
|
|
||||||
button.toggle-theme.dark .icon-dark {
|
|
||||||
display: block; }
|
display: block; }
|
||||||
|
|
||||||
.pure-menu-horizontal {
|
.pure-menu-horizontal {
|
||||||
|
|||||||
@@ -1,9 +1,11 @@
+from distutils.util import strtobool
+
 from flask import (
     flash
 )
 
 from . model import App, Watch
-from copy import deepcopy
+from copy import deepcopy, copy
 from os import path, unlink
 from threading import Lock
 import json
@@ -16,6 +18,11 @@ import threading
 import time
 import uuid as uuid_builder
 
+# Because the server will run as a daemon and wont know the URL for notification links when firing off a notification
+BASE_URL_NOT_SET_TEXT = '("Base URL" not set - see settings - notifications)'
+
+dictfilt = lambda x, y: dict([ (i,x[i]) for i in x if i in set(y) ])
+
 # Is there an existing library to ensure some data store (JSON etc) is in sync with CRUD methods?
 # Open a github issue if you know something :)
 # https://stackoverflow.com/questions/6190468/how-to-trigger-function-on-value-change
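The dictfilt helper added here is a one-line whitelist filter over a dict, for example:

dictfilt = lambda x, y: dict([ (i,x[i]) for i in x if i in set(y) ])

watch = {'url': 'https://example.com', 'uuid': 'abc-123', 'paused': False}
print(dictfilt(watch, ('url', 'paused')))   # {'url': 'https://example.com', 'paused': False}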
@@ -171,26 +178,21 @@ class ChangeDetectionStore:
 
     @property
     def data(self):
-        # Re #152, Return env base_url if not overriden, @todo also prefer the proxy pass url
-        env_base_url = os.getenv('BASE_URL','')
-        if not self.__data['settings']['application']['base_url']:
-            self.__data['settings']['application']['base_url'] = env_base_url.strip('" ')
-
-        return self.__data
+        # Re #152, Return env base_url if not overriden
+        # Re #148 - Some people have just {{ base_url }} in the body or title, but this may break some notification services
+        # like 'Join', so it's always best to atleast set something obvious so that they are not broken.
+        active_base_url = BASE_URL_NOT_SET_TEXT
+        if self.__data['settings']['application'].get('base_url'):
+            active_base_url = self.__data['settings']['application'].get('base_url')
+        elif os.getenv('BASE_URL'):
+            active_base_url = os.getenv('BASE_URL')
 
-    def get_all_tags(self):
-        tags = []
-        for uuid, watch in self.data['watching'].items():
-            if watch['tag'] is None:
-                continue
-            # Support for comma separated list of tags.
-            for tag in watch['tag'].split(','):
-                tag = tag.strip()
-                if tag not in tags:
-                    tags.append(tag)
-
-        tags.sort()
-        return tags
+        # I looked at various ways todo the following, but in the end just copying the dict seemed simplest/most reliable
+        # even given the memory tradeoff - if you know a better way.. maybe return d|self.__data.. or something
+        d = self.__data
+        d['settings']['application']['active_base_url'] = active_base_url.strip('" ')
+        return d
 
     # Delete a single watch by UUID
     def delete(self, uuid):
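The reworked data property now computes a single active_base_url for notification links, preferring the configured base_url, then the BASE_URL environment variable, then the fixed placeholder text. The resolution order, condensed into a small sketch:

import os

BASE_URL_NOT_SET_TEXT = '("Base URL" not set - see settings - notifications)'

def resolve_active_base_url(application_settings: dict) -> str:
    if application_settings.get('base_url'):          # 1. explicit setting wins
        return application_settings['base_url'].strip('" ')
    if os.getenv('BASE_URL'):                          # 2. then the environment
        return os.getenv('BASE_URL').strip('" ')
    return BASE_URL_NOT_SET_TEXT                       # 3. obvious placeholder otherwise

print(resolve_active_base_url({'base_url': ''}))       # placeholder unless BASE_URL is set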
@@ -204,22 +206,22 @@ class ChangeDetectionStore:
             # GitHub #30 also delete history records
             for uuid in self.data['watching']:
                 path = pathlib.Path(os.path.join(self.datastore_path, uuid))
-                shutil.rmtree(path)
-                self.needs_write_urgent = True
+                if os.path.exists(path):
+                    shutil.rmtree(path)
 
         else:
             path = pathlib.Path(os.path.join(self.datastore_path, uuid))
-            shutil.rmtree(path)
+            if os.path.exists(path):
+                shutil.rmtree(path)
             del self.data['watching'][uuid]
 
         self.needs_write_urgent = True
 
     # Clone a watch by UUID
     def clone(self, uuid):
-        url = self.data['watching'][uuid]['url']
-        tag = self.data['watching'][uuid]['tag']
+        url = self.data['watching'][uuid].get('url')
         extras = self.data['watching'][uuid]
-        new_uuid = self.add_watch(url=url, tag=tag, extras=extras)
+        new_uuid = self.add_watch(url=url, extras=extras)
         return new_uuid
 
     def url_exists(self, url):
@@ -254,16 +256,14 @@ class ChangeDetectionStore:
 
         self.needs_write_urgent = True
 
-    def add_watch(self, url, tag="", extras=None, write_to_disk_now=True):
+    def add_watch(self, url, tag='', extras=None, tag_uuids=None, write_to_disk_now=True):
 
         if extras is None:
             extras = {}
-        # should always be str
-        if tag is None or not tag:
-            tag = ''
 
         # Incase these are copied across, assume it's a reference and deepcopy()
         apply_extras = deepcopy(extras)
+        apply_extras['tags'] = [] if not apply_extras.get('tags') else apply_extras.get('tags')
 
         # Was it a share link? try to fetch the data
         if (url.startswith("https://changedetection.io/share/")):
@@ -290,6 +290,7 @@ class ChangeDetectionStore:
                     'processor',
                     'subtractive_selectors',
                     'tag',
+                    'tags',
                     'text_should_not_be_present',
                     'title',
                     'trigger_text',
@@ -312,24 +313,39 @@ class ChangeDetectionStore:
                 flash('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX', 'error')
                 return None
 
-        with self.lock:
-            # #Re 569
-            new_watch = Watch.model(datastore_path=self.datastore_path, default={
-                'url': url,
-                'tag': tag
-            })
+        if tag and type(tag) == str:
+            # Then it's probably a string of the actual tag by name, split and add it
+            for t in tag.split(','):
+                # for each stripped tag, add tag as UUID
+                for a_t in t.split(','):
+                    tag_uuid = self.add_tag(a_t)
+                    apply_extras['tags'].append(tag_uuid)
 
-            new_uuid = new_watch['uuid']
-            logging.debug("Added URL {} - {}".format(url, new_uuid))
+        # Or if UUIDs given directly
+        if tag_uuids:
+            apply_extras['tags'] = list(set(apply_extras['tags'] + tag_uuids))
 
-            for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
-                if k in apply_extras:
-                    del apply_extras[k]
+        # Make any uuids unique
+        if apply_extras.get('tags'):
+            apply_extras['tags'] = list(set(apply_extras.get('tags')))
 
-            new_watch.update(apply_extras)
-            self.__data['watching'][new_uuid] = new_watch
+        new_watch = Watch.model(datastore_path=self.datastore_path, url=url)
 
+        new_uuid = new_watch.get('uuid')
 
-            self.__data['watching'][new_uuid].ensure_data_dir_exists()
+        logging.debug("Added URL {} - {}".format(url, new_uuid))
+
+        for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
+            if k in apply_extras:
+                del apply_extras[k]
+
+        if not apply_extras.get('date_created'):
+            apply_extras['date_created'] = int(time.time())
+
+        new_watch.update(apply_extras)
+        new_watch.ensure_data_dir_exists()
+        self.__data['watching'][new_uuid] = new_watch
 
         if write_to_disk_now:
             self.sync_to_json()
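With the new add_watch() signature a caller can hand over tag names as a comma-separated string (each resolved to a tag UUID via add_tag()) or pass existing UUIDs through tag_uuids. A hedged usage sketch; `datastore` and `existing_tag_uuid` are placeholders, not objects defined in this diff:

# Tag names as a string: each entry becomes (or reuses) a tag UUID via add_tag()
uuid_a = datastore.add_watch(url="https://example.com/pricing",
                             tag="shopping, electronics")

# Tag UUIDs that already exist can be passed directly and are de-duplicated
uuid_b = datastore.add_watch(url="https://example.com/blog",
                             tag_uuids=[existing_tag_uuid])

# Runtime keys in extras ('uuid', 'history', 'last_checked', ...) are stripped,
# and date_created is filled in when missing
uuid_c = datastore.add_watch(url="https://example.com", extras={'paused': True})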
@@ -365,19 +381,21 @@ class ChangeDetectionStore:
     def save_error_text(self, watch_uuid, contents):
         if not self.data['watching'].get(watch_uuid):
             return
-        target_path = os.path.join(self.datastore_path, watch_uuid, "last-error.txt")
 
+        self.data['watching'][watch_uuid].ensure_data_dir_exists()
+        target_path = os.path.join(self.datastore_path, watch_uuid, "last-error.txt")
         with open(target_path, 'w') as f:
             f.write(contents)
 
     def save_xpath_data(self, watch_uuid, data, as_error=False):
 
         if not self.data['watching'].get(watch_uuid):
             return
         if as_error:
             target_path = os.path.join(self.datastore_path, watch_uuid, "elements-error.json")
         else:
             target_path = os.path.join(self.datastore_path, watch_uuid, "elements.json")
+        self.data['watching'][watch_uuid].ensure_data_dir_exists()
         with open(target_path, 'w') as f:
             f.write(json.dumps(data))
             f.close()
@@ -467,12 +485,12 @@ class ChangeDetectionStore:
|
|||||||
k = "ui-" + str(i) + proxy.get('proxy_name')
|
k = "ui-" + str(i) + proxy.get('proxy_name')
|
||||||
proxy_list[k] = {'label': proxy.get('proxy_name'), 'url': proxy.get('proxy_url')}
|
proxy_list[k] = {'label': proxy.get('proxy_name'), 'url': proxy.get('proxy_url')}
|
||||||
|
|
||||||
|
if proxy_list and strtobool(os.getenv('ENABLE_NO_PROXY_OPTION', 'True')):
|
||||||
|
proxy_list["no-proxy"] = {'label': "No proxy", 'url': ''}
|
||||||
|
|
||||||
return proxy_list if len(proxy_list) else None
|
return proxy_list if len(proxy_list) else None
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def get_preferred_proxy_for_watch(self, uuid):
|
def get_preferred_proxy_for_watch(self, uuid):
|
||||||
"""
|
"""
|
||||||
Returns the preferred proxy by ID key
|
Returns the preferred proxy by ID key
|
||||||
@@ -486,6 +504,9 @@ class ChangeDetectionStore:
|
|||||||
# If it's a valid one
|
# If it's a valid one
|
||||||
watch = self.data['watching'].get(uuid)
|
watch = self.data['watching'].get(uuid)
|
||||||
|
|
||||||
|
if strtobool(os.getenv('ENABLE_NO_PROXY_OPTION', 'True')) and watch.get('proxy') == "no-proxy":
|
||||||
|
return None
|
||||||
|
|
||||||
if watch.get('proxy') and watch.get('proxy') in list(self.proxy_list.keys()):
|
if watch.get('proxy') and watch.get('proxy') in list(self.proxy_list.keys()):
|
||||||
return watch.get('proxy')
|
return watch.get('proxy')
|
||||||
|
|
||||||
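Note: the ENABLE_NO_PROXY_OPTION changes above add an explicit "no-proxy" choice and make get_preferred_proxy_for_watch() return None for it, so such a watch bypasses any default proxy. A rough sketch of that selection logic with plain dicts; strtobool below is a minimal stand-in for the helper the store imports:

import os

def strtobool(value):
    # Minimal stand-in for the boolean-parsing helper used by the store
    return str(value).strip().lower() in ('1', 'true', 'yes', 'on')

proxy_list = {
    'ui-0example': {'label': 'Example proxy', 'url': 'socks5://127.0.0.1:1080'},  # made-up entry
}
if proxy_list and strtobool(os.getenv('ENABLE_NO_PROXY_OPTION', 'True')):
    proxy_list['no-proxy'] = {'label': 'No proxy', 'url': ''}

def preferred_proxy_key(watch):
    # "no-proxy" short-circuits to None so no proxy at all is used for this watch
    if strtobool(os.getenv('ENABLE_NO_PROXY_OPTION', 'True')) and watch.get('proxy') == 'no-proxy':
        return None
    if watch.get('proxy') and watch.get('proxy') in proxy_list:
        return watch.get('proxy')
    return None  # caller falls back to any system default

print(preferred_proxy_key({'proxy': 'no-proxy'}))     # None
print(preferred_proxy_key({'proxy': 'ui-0example'}))  # 'ui-0example'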
@@ -504,6 +525,105 @@ class ChangeDetectionStore:
|
|||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def has_extra_headers_file(self):
|
||||||
|
filepath = os.path.join(self.datastore_path, 'headers.txt')
|
||||||
|
return os.path.isfile(filepath)
|
||||||
|
|
||||||
|
def get_all_base_headers(self):
|
||||||
|
from .model.App import parse_headers_from_text_file
|
||||||
|
headers = {}
|
||||||
|
# Global app settings
|
||||||
|
headers.update(self.data['settings'].get('headers', {}))
|
||||||
|
|
||||||
|
return headers
|
||||||
|
|
||||||
|
def get_all_headers_in_textfile_for_watch(self, uuid):
|
||||||
|
from .model.App import parse_headers_from_text_file
|
||||||
|
headers = {}
|
||||||
|
|
||||||
|
# Global in /datastore/headers.txt
|
||||||
|
filepath = os.path.join(self.datastore_path, 'headers.txt')
|
||||||
|
try:
|
||||||
|
if os.path.isfile(filepath):
|
||||||
|
headers.update(parse_headers_from_text_file(filepath))
|
||||||
|
except Exception as e:
|
||||||
|
print(f"ERROR reading headers.txt at {filepath}", str(e))
|
||||||
|
|
||||||
|
watch = self.data['watching'].get(uuid)
|
||||||
|
if watch:
|
||||||
|
|
||||||
|
# In /datastore/xyz-xyz/headers.txt
|
||||||
|
filepath = os.path.join(watch.watch_data_dir, 'headers.txt')
|
||||||
|
try:
|
||||||
|
if os.path.isfile(filepath):
|
||||||
|
headers.update(parse_headers_from_text_file(filepath))
|
||||||
|
except Exception as e:
|
||||||
|
print(f"ERROR reading headers.txt at {filepath}", str(e))
|
||||||
|
|
||||||
|
# In /datastore/tag-name.txt
|
||||||
|
tags = self.get_all_tags_for_watch(uuid=uuid)
|
||||||
|
for tag_uuid, tag in tags.items():
|
||||||
|
fname = "headers-"+re.sub(r'[\W_]', '', tag.get('title')).lower().strip() + ".txt"
|
||||||
|
filepath = os.path.join(self.datastore_path, fname)
|
||||||
|
try:
|
||||||
|
if os.path.isfile(filepath):
|
||||||
|
headers.update(parse_headers_from_text_file(filepath))
|
||||||
|
except Exception as e:
|
||||||
|
print(f"ERROR reading headers.txt at {filepath}", str(e))
|
||||||
|
|
||||||
|
return headers
|
||||||
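For illustration: get_all_headers_in_textfile_for_watch() layers optional files on top of each other, /datastore/headers.txt first, then the watch's own headers.txt, then headers-<tagname>.txt per tag, with later files overriding earlier keys. A simplified sketch of that precedence, assuming the files use plain "Header-Name: value" lines (a guess at what parse_headers_from_text_file() accepts; see model/App.py and the wiki page on adding headers from an external file for the exact rules):

import os

def parse_headers_textfile(filepath):
    # Hedged stand-in for parse_headers_from_text_file(): one "Name: value" pair per line
    headers = {}
    with open(filepath, 'r') as f:
        for line in f:
            if ':' in line:
                name, value = line.split(':', 1)
                headers[name.strip()] = value.strip()
    return headers

def merged_headers(datastore_path, watch_uuid, tag_titles, base_headers):
    headers = dict(base_headers)  # global settings headers come first
    candidates = [
        os.path.join(datastore_path, 'headers.txt'),              # global file
        os.path.join(datastore_path, watch_uuid, 'headers.txt'),  # per-watch file
    ]
    # Per-tag files such as "headers-mytag.txt" (non-alphanumerics stripped from the title)
    candidates += [
        os.path.join(datastore_path, "headers-" + ''.join(c for c in t if c.isalnum()).lower() + ".txt")
        for t in tag_titles
    ]
    for filepath in candidates:
        if os.path.isfile(filepath):
            headers.update(parse_headers_textfile(filepath))
    return headers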
|
|
||||||
|
def get_tag_overrides_for_watch(self, uuid, attr):
|
||||||
|
tags = self.get_all_tags_for_watch(uuid=uuid)
|
||||||
|
ret = []
|
||||||
|
|
||||||
|
if tags:
|
||||||
|
for tag_uuid, tag in tags.items():
|
||||||
|
if attr in tag and tag[attr]:
|
||||||
|
ret=[*ret, *tag[attr]]
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def add_tag(self, name):
|
||||||
|
# If name exists, return that
|
||||||
|
n = name.strip().lower()
|
||||||
|
print (f">>> Adding new tag - '{n}'")
|
||||||
|
if not n:
|
||||||
|
return False
|
||||||
|
|
||||||
|
for uuid, tag in self.__data['settings']['application'].get('tags', {}).items():
|
||||||
|
if n == tag.get('title', '').lower().strip():
|
||||||
|
print (f">>> Tag {name} already exists")
|
||||||
|
return uuid
|
||||||
|
|
||||||
|
# Eventually almost everything to do with a watch will apply as a Tag
|
||||||
|
# So we use the same model as a Watch
|
||||||
|
with self.lock:
|
||||||
|
new_tag = Watch.model(datastore_path=self.datastore_path, default={
|
||||||
|
'title': name.strip(),
|
||||||
|
'date_created': int(time.time())
|
||||||
|
})
|
||||||
|
|
||||||
|
new_uuid = new_tag.get('uuid')
|
||||||
|
|
||||||
|
self.__data['settings']['application']['tags'][new_uuid] = new_tag
|
||||||
|
|
||||||
|
return new_uuid
|
||||||
|
|
||||||
|
def get_all_tags_for_watch(self, uuid):
|
||||||
|
"""This should be in Watch model but Watch doesn't have access to datastore, not sure how to solve that yet"""
|
||||||
|
watch = self.data['watching'].get(uuid)
|
||||||
|
|
||||||
|
# Should return a dict of full tag info linked by UUID
|
||||||
|
if watch:
|
||||||
|
return dictfilt(self.__data['settings']['application']['tags'], watch.get('tags', []))
|
||||||
|
|
||||||
|
return {}
|
||||||
|
|
||||||
|
def tag_exists_by_name(self, tag_name):
|
||||||
|
return any(v.get('title', '').lower() == tag_name.lower() for k, v in self.__data['settings']['application']['tags'].items())
|
||||||
|
|
||||||
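Note: the tag registry these helpers work on is simply a dict under settings['application']['tags'], keyed by UUID, where each entry is a Watch-model record with at least a title. Illustrative shape only (all values made up):

# Hypothetical contents of self.__data['settings']['application']['tags']
tags = {
    'b54a6ccc-0000-4000-8000-000000000001': {'title': 'News',     'date_created': 1690000000},
    'b54a6ccc-0000-4000-8000-000000000002': {'title': 'Shopping', 'date_created': 1690000100},
}

def tag_exists_by_name(tag_name):
    # Same idea as the store method above: titles compare case-insensitively
    return any(v.get('title', '').lower() == tag_name.lower() for v in tags.values())

print(tag_exists_by_name('news'))         # True
print(tag_exists_by_name('electronics'))  # False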
# Run all updates
|
# Run all updates
|
||||||
# IMPORTANT - Each update could be run even when they have a new install and the schema is correct
|
# IMPORTANT - Each update could be run even when they have a new install and the schema is correct
|
||||||
# So therefore - each `update_n` should be very careful about checking if it needs to actually run
|
# So therefore - each `update_n` should be very careful about checking if it needs to actually run
|
||||||
@@ -679,3 +799,25 @@ class ChangeDetectionStore:
|
|||||||
except:
|
except:
|
||||||
continue
|
continue
|
||||||
return
|
return
|
||||||
|
|
||||||
|
# Create tag objects and their references from existing tag text
|
||||||
|
def update_12(self):
|
||||||
|
i = 0
|
||||||
|
for uuid, watch in self.data['watching'].items():
|
||||||
|
# Split out and convert old tag string
|
||||||
|
tag = watch.get('tag')
|
||||||
|
if tag:
|
||||||
|
tag_uuids = []
|
||||||
|
for t in tag.split(','):
|
||||||
|
tag_uuids.append(self.add_tag(name=t))
|
||||||
|
|
||||||
|
self.data['watching'][uuid]['tags'] = tag_uuids
|
||||||
|
|
||||||
|
# #1775 - Update 11 did not update the records correctly when adding 'date_created' values for sorting
|
||||||
|
def update_13(self):
|
||||||
|
i = 0
|
||||||
|
for uuid, watch in self.data['watching'].items():
|
||||||
|
if not watch.get('date_created'):
|
||||||
|
self.data['watching'][uuid]['date_created'] = i
|
||||||
|
i+=1
|
||||||
|
return
|
||||||
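Note: as the comment above says, these migrations must be safe to re-run. update_12 converts each watch's old comma-separated 'tag' string into a list of tag UUIDs under 'tags' (reusing add_tag()), and update_13 backfills missing 'date_created' values so sorting works. Roughly, for one watch record (UUIDs are illustrative only):

# Before update_12 (legacy shape)
watch = {'url': 'https://example.com', 'tag': 'news, shopping'}

# After update_12: 'tags' holds one UUID per tag name; the old 'tag' field itself is not removed here
watch = {
    'url': 'https://example.com',
    'tag': 'news, shopping',
    'tags': ['b54a6ccc-0000-4000-8000-000000000001',
             'b54a6ccc-0000-4000-8000-000000000002'],
}

# After update_13: any watch still lacking 'date_created' gets a small incrementing integer
watch.setdefault('date_created', 0)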
@@ -13,9 +13,9 @@
|
|||||||
<div class="pure-form-message-inline">
|
<div class="pure-form-message-inline">
|
||||||
<ul>
|
<ul>
|
||||||
<li>Use <a target=_new href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.</li>
|
<li>Use <a target=_new href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.</li>
|
||||||
<li><code>discord://</code> only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
|
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_discord">discord://</a></code> (or <code>https://discord.com/api/webhooks...</code>)) </code> only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
|
||||||
<li><code>tgram://</code> bots cant send messages to other bots, so you should specify chat ID of non-bot user.</li>
|
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> bots can't send messages to other bots, so you should specify chat ID of non-bot user.</li>
|
||||||
<li><code>tgram://</code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li>
|
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li>
|
||||||
<li><code>gets://</code>, <code>posts://</code>, <code>puts://</code>, <code>deletes://</code> for direct API calls (or omit the "<code>s</code>" for non-SSL ie <code>get://</code>)</li>
|
<li><code>gets://</code>, <code>posts://</code>, <code>puts://</code>, <code>deletes://</code> for direct API calls (or omit the "<code>s</code>" for non-SSL ie <code>get://</code>)</li>
|
||||||
<li>Accepts the <code>{{ '{{token}}' }}</code> placeholders listed below</li>
|
<li>Accepts the <code>{{ '{{token}}' }}</code> placeholders listed below</li>
|
||||||
</ul>
|
</ul>
|
||||||
@@ -23,7 +23,7 @@
|
|||||||
<div class="notifications-wrapper">
|
<div class="notifications-wrapper">
|
||||||
<a id="send-test-notification" class="pure-button button-secondary button-xsmall" >Send test notification</a>
|
<a id="send-test-notification" class="pure-button button-secondary button-xsmall" >Send test notification</a>
|
||||||
{% if emailprefix %}
|
{% if emailprefix %}
|
||||||
<a id="add-email-helper" class="pure-button button-secondary button-xsmall" >Add email</a>
|
<a id="add-email-helper" class="pure-button button-secondary button-xsmall" >Add email <img style="height: 1em; display: inline-block" src="{{url_for('static_content', group='images', filename='email.svg')}}" alt="Add an email address"> </a>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
<a href="{{url_for('notification_logs')}}" class="pure-button button-secondary button-xsmall" >Notification debug logs</a>
|
<a href="{{url_for('notification_logs')}}" class="pure-button button-secondary button-xsmall" >Notification debug logs</a>
|
||||||
</div>
|
</div>
|
||||||
@@ -35,18 +35,14 @@
|
|||||||
</div>
|
</div>
|
||||||
<div class="pure-control-group">
|
<div class="pure-control-group">
|
||||||
{{ render_field(form.notification_body , rows=5, class="notification-body", placeholder=settings_application['notification_body']) }}
|
{{ render_field(form.notification_body , rows=5, class="notification-body", placeholder=settings_application['notification_body']) }}
|
||||||
<span class="pure-form-message-inline">Body for all notifications</span>
|
<span class="pure-form-message-inline">Body for all notifications ‐ You can use <a target="_new" href="https://jinja.palletsprojects.com/en/3.0.x/templates/">Jinja2</a> templating in the notification title, body and URL, and tokens from below.
|
||||||
</div>
|
</span>
|
||||||
<div class="pure-control-group">
|
|
||||||
<!-- unsure -->
|
|
||||||
{{ render_field(form.notification_format , class="notification-format") }}
|
|
||||||
<span class="pure-form-message-inline">Format for all notifications</span>
|
|
||||||
</div>
|
</div>
|
||||||
<div class="pure-controls">
|
<div class="pure-controls">
|
||||||
<p class="pure-form-message-inline">
|
<div id="notification-token-toggle" class="pure-button button-tag button-xsmall">Show token/placeholders</div>
|
||||||
You can use <a target="_new" href="https://jinja.palletsprojects.com/en/3.0.x/templates/">Jinja2</a> templating in the notification title, body and URL.
|
</div>
|
||||||
</p>
|
<div class="pure-controls" style="display: none;" id="notification-tokens-info">
|
||||||
|
|
||||||
<table class="pure-table" id="token-table">
|
<table class="pure-table" id="token-table">
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
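Note: the notification title, body and URL now accept Jinja2 templating together with the placeholder tokens tabulated here, so a body can mix simple logic with token values. A quick illustration rendered with the jinja2 package outside the app; the token values below are faked purely for the example:

from jinja2 import Template  # pip install jinja2

# Faked token values; changedetection.io fills these in when the notification is sent
tokens = {
    'diff': '-Price: 24.99\n+Price: 19.99',
    'diff_url': 'https://my-instance.example/diff/some-watch-uuid',  # placeholder URL
    'current_snapshot': 'Price: 19.99',
}

body = Template(
    "Something changed!\n"
    "{% if 'Price' in diff %}Looks price related.{% endif %}\n"
    "{{ diff }}\n"
    "Full diff: {{ diff_url }}"
)
print(body.render(**tokens))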
@@ -99,9 +95,13 @@
|
|||||||
<td><code>{{ '{{diff_full}}' }}</code></td>
|
<td><code>{{ '{{diff_full}}' }}</code></td>
|
||||||
<td>The diff output - full difference output</td>
|
<td>The diff output - full difference output</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><code>{{ '{{diff_patch}}' }}</code></td>
|
||||||
|
<td>The diff output - patch in unified format</td>
|
||||||
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td><code>{{ '{{current_snapshot}}' }}</code></td>
|
<td><code>{{ '{{current_snapshot}}' }}</code></td>
|
||||||
<td>The current snapshot value, useful when combined with JSON or CSS filters
|
<td>The current snapshot text contents value, useful when combined with JSON or CSS filters
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
@@ -111,12 +111,15 @@
|
|||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
<div class="pure-form-message-inline">
|
<div class="pure-form-message-inline">
|
||||||
<br>
|
<p>
|
||||||
URLs generated by changedetection.io (such as <code>{{ '{{diff_url}}' }}</code>) require the <code>BASE_URL</code> environment variable set.<br>
|
Warning: Contents of <code>{{ '{{diff}}' }}</code>, <code>{{ '{{diff_removed}}' }}</code>, and <code>{{ '{{diff_added}}' }}</code> depend on how the difference algorithm perceives the change. <br>
|
||||||
Your <code>BASE_URL</code> var is currently "{{settings_application['current_base_url']}}"
|
For example, an addition or removal could be perceived as a change in some cases. <a target="_new" href="https://github.com/dgtlmoon/changedetection.io/wiki/Using-the-%7B%7Bdiff%7D%7D,-%7B%7Bdiff_added%7D%7D,-and-%7B%7Bdiff_removed%7D%7D-notification-tokens">More Here</a> <br>
|
||||||
<br>
|
</p>
|
||||||
Warning: Contents of <code>{{ '{{diff}}' }}</code>, <code>{{ '{{diff_removed}}' }}</code>, and <code>{{ '{{diff_added}}' }}</code> depend on how the difference algorithm perceives the change. For example, an addition or removal could be perceived as a change in some cases. <a target="_new" href="https://github.com/dgtlmoon/changedetection.io/wiki/Using-the-%7B%7Bdiff%7D%7D,-%7B%7Bdiff_added%7D%7D,-and-%7B%7Bdiff_removal%7D%7D-notification-tokens">More Here</a> </br>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
<div class="pure-control-group">
|
||||||
|
{{ render_field(form.notification_format , class="notification-format") }}
|
||||||
|
<span class="pure-form-message-inline">Format for all notifications</span>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{% endmacro %}
|
{% endmacro %}
|
||||||
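A note on the {{diff}} warning above: because the tokens come from a line-based difference algorithm, an edited line is usually reported as one removed line plus one added line rather than a single "change". Python's standard difflib shows the same effect (similar in spirit, not necessarily identical to the app's differ):

import difflib

before = ["Price: 24.99", "In stock"]
after  = ["Price: 19.99", "In stock"]

# The edited line appears as a removal plus an addition
for line in difflib.unified_diff(before, after, lineterm=''):
    print(line)
# Prints the --- / +++ header lines followed by:
# @@ -1,2 +1,2 @@
# -Price: 24.99
# +Price: 19.99
#  In stock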
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
{% macro render_field(field) %}
|
{% macro render_field(field) %}
|
||||||
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
|
||||||
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
|
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
|
||||||
|
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
||||||
{% if field.errors %}
|
{% if field.errors %}
|
||||||
<ul class=errors>
|
<ul class=errors>
|
||||||
{% for error in field.errors %}
|
{% for error in field.errors %}
|
||||||
@@ -25,18 +24,6 @@
|
|||||||
</div>
|
</div>
|
||||||
{% endmacro %}
|
{% endmacro %}
|
||||||
|
|
||||||
{% macro render_field(field) %}
|
|
||||||
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
|
|
||||||
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
|
||||||
{% if field.errors %}
|
|
||||||
<ul class=errors>
|
|
||||||
{% for error in field.errors %}
|
|
||||||
<li>{{ error }}</li>
|
|
||||||
{% endfor %}
|
|
||||||
</ul>
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
|
||||||
{% endmacro %}
|
|
||||||
|
|
||||||
{% macro render_simple_field(field) %}
|
{% macro render_simple_field(field) %}
|
||||||
<span class="label {% if field.errors %}error{% endif %}">{{ field.label }}</span>
|
<span class="label {% if field.errors %}error{% endif %}">{{ field.label }}</span>
|
||||||
|
|||||||
@@ -1,7 +0,0 @@
|
|||||||
{% macro pagination(sorted_watches, total_per_page, current_page) %}
|
|
||||||
{{ sorted_watches|length }}
|
|
||||||
|
|
||||||
{% for row in sorted_watches|batch(total_per_page, ' ') %}
|
|
||||||
{{ loop.index}}
|
|
||||||
{% endfor %}
|
|
||||||
{% endmacro %}
|
|
||||||
@@ -2,42 +2,42 @@
|
|||||||
<html lang="en" data-darkmode="{{ get_darkmode_state() }}">
|
<html lang="en" data-darkmode="{{ get_darkmode_state() }}">
|
||||||
|
|
||||||
<head>
|
<head>
|
||||||
<meta charset="utf-8"/>
|
<meta charset="utf-8" >
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" >
|
||||||
<meta name="description" content="Self hosted website change detection."/>
|
<meta name="description" content="Self hosted website change detection." >
|
||||||
<title>Change Detection{{extra_title}}</title>
|
<title>Change Detection{{extra_title}}</title>
|
||||||
<link rel="alternate" type="application/rss+xml" title="Changedetection.io » Feed{% if active_tag %}- {{active_tag}}{% endif %}" href="{{ url_for('rss', tag=active_tag , token=app_rss_token)}}"/>
|
<link rel="alternate" type="application/rss+xml" title="Changedetection.io » Feed{% if active_tag %}- {{active_tag}}{% endif %}" href="{{ url_for('rss', tag=active_tag , token=app_rss_token)}}" >
|
||||||
<link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='pure-min.css')}}"/>
|
<link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='pure-min.css')}}" >
|
||||||
<link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='styles.css')}}"/>
|
<link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='styles.css')}}" >
|
||||||
{% if extra_stylesheets %}
|
{% if extra_stylesheets %}
|
||||||
{% for m in extra_stylesheets %}
|
{% for m in extra_stylesheets %}
|
||||||
<link rel="stylesheet" href="{{ m }}?ver=1000"/>
|
<link rel="stylesheet" href="{{ m }}?ver=1000" >
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
<link rel="apple-touch-icon" sizes="180x180" href="{{url_for('static_content', group='favicons', filename='apple-touch-icon.png')}}"/>
|
<link rel="apple-touch-icon" sizes="180x180" href="{{url_for('static_content', group='favicons', filename='apple-touch-icon.png')}}">
|
||||||
<link rel="icon" type="image/png" sizes="32x32" href="{{url_for('static_content', group='favicons', filename='favicon-32x32.png')}}"/>
|
<link rel="icon" type="image/png" sizes="32x32" href="{{url_for('static_content', group='favicons', filename='favicon-32x32.png')}}">
|
||||||
<link rel="icon" type="image/png" sizes="16x16" href="{{url_for('static_content', group='favicons', filename='favicon-16x16.png')}}"/>
|
<link rel="icon" type="image/png" sizes="16x16" href="{{url_for('static_content', group='favicons', filename='favicon-16x16.png')}}">
|
||||||
<link rel="manifest" href="{{url_for('static_content', group='favicons', filename='site.webmanifest')}}"/>
|
<link rel="manifest" href="{{url_for('static_content', group='favicons', filename='site.webmanifest')}}">
|
||||||
<link rel="mask-icon" href="{{url_for('static_content', group='favicons', filename='safari-pinned-tab.svg')}}" color="#5bbad5"/>
|
<link rel="mask-icon" href="{{url_for('static_content', group='favicons', filename='safari-pinned-tab.svg')}}" color="#5bbad5">
|
||||||
<link rel="shortcut icon" href="{{url_for('static_content', group='favicons', filename='favicon.ico')}}"/>
|
<link rel="shortcut icon" href="{{url_for('static_content', group='favicons', filename='favicon.ico')}}">
|
||||||
<meta name="msapplication-TileColor" content="#da532c"/>
|
<meta name="msapplication-TileColor" content="#da532c">
|
||||||
<meta name="msapplication-config" content="favicons/browserconfig.xml"/>
|
<meta name="msapplication-config" content="favicons/browserconfig.xml">
|
||||||
<meta name="theme-color" content="#ffffff"/>
|
<meta name="theme-color" content="#ffffff">
|
||||||
|
|
||||||
<style>
|
<style>
|
||||||
body::before {
|
body::before {
|
||||||
background-image: url({{url_for('static_content', group='images', filename='gradient-border.png') }});
|
background-image: url({{url_for('static_content', group='images', filename='gradient-border.png') }});
|
||||||
}
|
}
|
||||||
</style>
|
</style>
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
|
<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
<div class="header">
|
<div class="header">
|
||||||
<div class="home-menu pure-menu pure-menu-horizontal pure-menu-fixed" id="nav-menu">
|
<div class="home-menu pure-menu pure-menu-horizontal pure-menu-fixed" id="nav-menu">
|
||||||
{% if has_password and not current_user.is_authenticated %}
|
{% if has_password and not current_user.is_authenticated %}
|
||||||
<a class="pure-menu-heading" href="https://github.com/dgtlmoon/changedetection.io" rel="noopener">
|
<a class="pure-menu-heading" href="https://changedetection.io" rel="noopener">
|
||||||
<strong>Change</strong>Detection.io</a>
|
<strong>Change</strong>Detection.io</a>
|
||||||
{% else %}
|
{% else %}
|
||||||
<a class="pure-menu-heading" href="{{url_for('index')}}">
|
<a class="pure-menu-heading" href="{{url_for('index')}}">
|
||||||
@@ -49,7 +49,7 @@
|
|||||||
{% else %}
|
{% else %}
|
||||||
{% if new_version_available and not(has_password and not current_user.is_authenticated) %}
|
{% if new_version_available and not(has_password and not current_user.is_authenticated) %}
|
||||||
<span id="new-version-text" class="pure-menu-heading">
|
<span id="new-version-text" class="pure-menu-heading">
|
||||||
<a href="https://github.com/dgtlmoon/changedetection.io">A new version is available</a>
|
<a href="https://changedetection.io">A new version is available</a>
|
||||||
</span>
|
</span>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
@@ -58,6 +58,9 @@
|
|||||||
{% if current_user.is_authenticated or not has_password %}
|
{% if current_user.is_authenticated or not has_password %}
|
||||||
{% if not
|
{% if not
|
||||||
current_diff_url %}
|
current_diff_url %}
|
||||||
|
<li class="pure-menu-item">
|
||||||
|
<a href="{{ url_for('tags.tags_overview_page')}}" class="pure-menu-link">GROUPS</a>
|
||||||
|
</li>
|
||||||
<li class="pure-menu-item">
|
<li class="pure-menu-item">
|
||||||
<a href="{{ url_for('settings_page')}}" class="pure-menu-link">SETTINGS</a>
|
<a href="{{ url_for('settings_page')}}" class="pure-menu-link">SETTINGS</a>
|
||||||
</li>
|
</li>
|
||||||
@@ -74,7 +77,7 @@
|
|||||||
{% endif %}
|
{% endif %}
|
||||||
{% else %}
|
{% else %}
|
||||||
<li class="pure-menu-item">
|
<li class="pure-menu-item">
|
||||||
<a class="pure-menu-link" href="https://github.com/dgtlmoon/changedetection.io">Website Change Detection and Notification.</a>
|
<a class="pure-menu-link" href="https://changedetection.io">Website Change Detection and Notification.</a>
|
||||||
</li>
|
</li>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% if current_user.is_authenticated %}
|
{% if current_user.is_authenticated %}
|
||||||
@@ -82,11 +85,18 @@
|
|||||||
<a href="{{url_for('logout')}}" class="pure-menu-link">LOG OUT</a>
|
<a href="{{url_for('logout')}}" class="pure-menu-link">LOG OUT</a>
|
||||||
</li>
|
</li>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
<li class="pure-menu-item pure-form" id="search-menu-item">
|
||||||
|
<!-- We use GET here so it offers people a chance to set bookmarks etc -->
|
||||||
|
<form name="searchForm" action="" method="GET">
|
||||||
|
<input id="search-q" class="" name="q" placeholder="URL or Title {% if active_tag %}in '{{ active_tag }}'{% endif %}" required="" type="text" value="">
|
||||||
|
<input name="tags" type="hidden" value="{% if active_tag %}{{active_tag}}{% endif %}">
|
||||||
|
<button class="toggle-button " id="toggle-search" type="button" title="Search, or Use Alt+S Key" >
|
||||||
|
{% include "svgs/search-icon.svg" %}
|
||||||
|
</button>
|
||||||
|
</form>
|
||||||
|
</li>
|
||||||
<li class="pure-menu-item">
|
<li class="pure-menu-item">
|
||||||
{% if dark_mode %}
|
<button class="toggle-button" id ="toggle-light-mode" type="button" title="Toggle Light/Dark Mode">
|
||||||
{% set darkClass = 'dark' %}
|
|
||||||
{% endif %}
|
|
||||||
<button class="toggle-theme {{darkClass}}" type="button" title="Toggle Light/Dark Mode">
|
|
||||||
<span class="visually-hidden">Toggle light/dark mode</span>
|
<span class="visually-hidden">Toggle light/dark mode</span>
|
||||||
<span class="icon-light">
|
<span class="icon-light">
|
||||||
{% include "svgs/light-mode-toggle-icon.svg" %}
|
{% include "svgs/light-mode-toggle-icon.svg" %}
|
||||||
@@ -106,7 +116,7 @@
|
|||||||
</div>
|
</div>
|
||||||
{% if hosted_sticky %}
|
{% if hosted_sticky %}
|
||||||
<div class="sticky-tab" id="hosted-sticky">
|
<div class="sticky-tab" id="hosted-sticky">
|
||||||
<a href="https://lemonade.changedetection.io/start?ref={{guid}}">Let us host your instance!</a>
|
<a href="https://changedetection.io/?ref={{guid}}">Let us host your instance!</a>
|
||||||
</div>
|
</div>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% if left_sticky %}
|
{% if left_sticky %}
|
||||||
@@ -137,16 +147,13 @@
|
|||||||
<li class="message">
|
<li class="message">
|
||||||
Share this link:
|
Share this link:
|
||||||
<span id="share-link">{{ session['share-link'] }}</span>
|
<span id="share-link">{{ session['share-link'] }}</span>
|
||||||
<img style="height: 1em; display: inline-block" src="{{url_for('static_content', group='images', filename='copy.svg')}}"/>
|
<img style="height: 1em; display: inline-block" src="{{url_for('static_content', group='images', filename='copy.svg')}}" >
|
||||||
</li>
|
</li>
|
||||||
</ul>
|
</ul>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% block content %}{% endblock %}
|
{% block content %}{% endblock %}
|
||||||
</section>
|
</section>
|
||||||
<script
|
<script src="{{url_for('static_content', group='js', filename='toggle-theme.js')}}" defer></script>
|
||||||
type="text/javascript"
|
|
||||||
src="{{url_for('static_content', group='js', filename='toggle-theme.js')}}"
|
|
||||||
defer></script>
|
|
||||||
</body>
|
</body>
|
||||||
|
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
action="{{url_for('clear_all_history')}}"
|
action="{{url_for('clear_all_history')}}"
|
||||||
method="POST"
|
method="POST"
|
||||||
>
|
>
|
||||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" />
|
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<div class="pure-control-group">
|
<div class="pure-control-group">
|
||||||
This will remove version history (snapshots) for ALL watches, but keep
|
This will remove version history (snapshots) for ALL watches, but keep
|
||||||
|
|||||||
@@ -6,8 +6,11 @@
|
|||||||
{% if last_error_screenshot %}
|
{% if last_error_screenshot %}
|
||||||
const error_screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid, error_screenshot=1) }}";
|
const error_screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid, error_screenshot=1) }}";
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
|
const highlight_submit_ignore_url="{{url_for('highlight_submit_ignore_url', uuid=uuid)}}";
|
||||||
|
|
||||||
</script>
|
</script>
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script>
|
<script src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script>
|
||||||
|
|
||||||
<div id="settings">
|
<div id="settings">
|
||||||
<h1>Differences</h1>
|
<h1>Differences</h1>
|
||||||
@@ -15,15 +18,15 @@
|
|||||||
<fieldset>
|
<fieldset>
|
||||||
|
|
||||||
<label for="diffWords" class="pure-checkbox">
|
<label for="diffWords" class="pure-checkbox">
|
||||||
<input type="radio" name="diff_type" id="diffWords" value="diffWords"/> Words</label>
|
<input type="radio" name="diff_type" id="diffWords" value="diffWords"> Words</label>
|
||||||
<label for="diffLines" class="pure-checkbox">
|
<label for="diffLines" class="pure-checkbox">
|
||||||
<input type="radio" name="diff_type" id="diffLines" value="diffLines" checked=""/> Lines</label>
|
<input type="radio" name="diff_type" id="diffLines" value="diffLines" checked=""> Lines</label>
|
||||||
|
|
||||||
<label for="diffChars" class="pure-checkbox">
|
<label for="diffChars" class="pure-checkbox">
|
||||||
<input type="radio" name="diff_type" id="diffChars" value="diffChars"/> Chars</label>
|
<input type="radio" name="diff_type" id="diffChars" value="diffChars"> Chars</label>
|
||||||
<!-- @todo - when mimetype is JSON, select this by default? -->
|
<!-- @todo - when mimetype is JSON, select this by default? -->
|
||||||
<label for="diffJson" class="pure-checkbox">
|
<label for="diffJson" class="pure-checkbox">
|
||||||
<input type="radio" name="diff_type" id="diffJson" value="diffJson" /> JSON</label>
|
<input type="radio" name="diff_type" id="diffJson" value="diffJson" > JSON</label>
|
||||||
|
|
||||||
{% if versions|length >= 1 %}
|
{% if versions|length >= 1 %}
|
||||||
<label for="diff-version">Compare newest (<span id="current-v-date"></span>) with</label>
|
<label for="diff-version">Compare newest (<span id="current-v-date"></span>) with</label>
|
||||||
@@ -43,7 +46,7 @@
|
|||||||
<span>
|
<span>
|
||||||
<!-- https://github.com/kpdecker/jsdiff/issues/389 ? -->
|
<!-- https://github.com/kpdecker/jsdiff/issues/389 ? -->
|
||||||
<label for="ignoreWhitespace" class="pure-checkbox" id="label-diff-ignorewhitespace">
|
<label for="ignoreWhitespace" class="pure-checkbox" id="label-diff-ignorewhitespace">
|
||||||
<input type="checkbox" id="ignoreWhitespace" name="ignoreWhitespace"/> Ignore Whitespace</label>
|
<input type="checkbox" id="ignoreWhitespace" name="ignoreWhitespace" > Ignore Whitespace</label>
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -51,7 +54,7 @@
|
|||||||
<a onclick="next_diff();">Jump</a>
|
<a onclick="next_diff();">Jump</a>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||||
<div class="tabs">
|
<div class="tabs">
|
||||||
<ul>
|
<ul>
|
||||||
{% if last_error_text %}<li class="tab" id="error-text-tab"><a href="#error-text">Error Text</a></li> {% endif %}
|
{% if last_error_text %}<li class="tab" id="error-text-tab"><a href="#error-text">Error Text</a></li> {% endif %}
|
||||||
@@ -72,11 +75,11 @@
|
|||||||
|
|
||||||
<div class="tab-pane-inner" id="error-screenshot">
|
<div class="tab-pane-inner" id="error-screenshot">
|
||||||
<div class="snapshot-age error">{{watch_a.snapshot_error_screenshot_ctime|format_seconds_ago}} seconds ago</div>
|
<div class="snapshot-age error">{{watch_a.snapshot_error_screenshot_ctime|format_seconds_ago}} seconds ago</div>
|
||||||
<img id="error-screenshot-img" style="max-width: 80%" alt="Current error-ing screenshot from most recent request"/>
|
<img id="error-screenshot-img" style="max-width: 80%" alt="Current error-ing screenshot from most recent request" >
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="tab-pane-inner" id="text">
|
<div class="tab-pane-inner" id="text">
|
||||||
<div class="tip">Pro-tip: Use <strong>show current snapshot</strong> tab to visualise what will be ignored.</div>
|
<div class="tip">Pro-tip: Use <strong>show current snapshot</strong> tab to visualise what will be ignored, highlight text to add to ignore filters</div>
|
||||||
|
|
||||||
{% if password_enabled_and_share_is_off %}
|
{% if password_enabled_and_share_is_off %}
|
||||||
<div class="tip">Pro-tip: You can enable <strong>"share access when password is enabled"</strong> from settings</div>
|
<div class="tip">Pro-tip: You can enable <strong>"share access when password is enabled"</strong> from settings</div>
|
||||||
@@ -91,7 +94,7 @@
|
|||||||
<td id="a" style="display: none;">{{previous}}</td>
|
<td id="a" style="display: none;">{{previous}}</td>
|
||||||
<td id="b" style="display: none;">{{newest}}</td>
|
<td id="b" style="display: none;">{{newest}}</td>
|
||||||
<td id="diff-col">
|
<td id="diff-col">
|
||||||
<span id="result"></span>
|
<span id="result" class="highlightable-filter"></span>
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
</tbody>
|
</tbody>
|
||||||
@@ -105,7 +108,7 @@
|
|||||||
{% if is_html_webdriver %}
|
{% if is_html_webdriver %}
|
||||||
{% if screenshot %}
|
{% if screenshot %}
|
||||||
<div class="snapshot-age">{{watch_a.snapshot_screenshot_ctime|format_timestamp_timeago}}</div>
|
<div class="snapshot-age">{{watch_a.snapshot_screenshot_ctime|format_timestamp_timeago}}</div>
|
||||||
<img style="max-width: 80%" id="screenshot-img" alt="Current screenshot from most recent request"/>
|
<img style="max-width: 80%" id="screenshot-img" alt="Current screenshot from most recent request" >
|
||||||
{% else %}
|
{% else %}
|
||||||
No screenshot available just yet! Try rechecking the page.
|
No screenshot available just yet! Try rechecking the page.
|
||||||
{% endif %}
|
{% endif %}
|
||||||
@@ -117,7 +120,7 @@
|
|||||||
<form id="extract-data-form" class="pure-form pure-form-stacked edit-form"
|
<form id="extract-data-form" class="pure-form pure-form-stacked edit-form"
|
||||||
action="{{ url_for('diff_history_page', uuid=uuid) }}#extract"
|
action="{{ url_for('diff_history_page', uuid=uuid) }}#extract"
|
||||||
method="POST">
|
method="POST">
|
||||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||||
|
|
||||||
<p>This tool will extract text data from all of the watch history.</p>
|
<p>This tool will extract text data from all of the watch history.</p>
|
||||||
|
|
||||||
@@ -149,9 +152,9 @@
|
|||||||
<script>
|
<script>
|
||||||
const newest_version_timestamp = {{newest_version_timestamp}};
|
const newest_version_timestamp = {{newest_version_timestamp}};
|
||||||
</script>
|
</script>
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='diff.min.js')}}"></script>
|
<script src="{{url_for('static_content', group='js', filename='diff.min.js')}}"></script>
|
||||||
|
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='diff-render.js')}}"></script>
|
<script src="{{url_for('static_content', group='js', filename='diff-render.js')}}"></script>
|
||||||
|
|
||||||
|
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|||||||
@@ -2,29 +2,34 @@
|
|||||||
{% block content %}
|
{% block content %}
|
||||||
{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
|
{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
|
||||||
{% from '_common_fields.jinja' import render_common_settings_form %}
|
{% from '_common_fields.jinja' import render_common_settings_form %}
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||||
<script>
|
<script>
|
||||||
const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
|
|
||||||
const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}";
|
|
||||||
const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
|
|
||||||
const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};
|
|
||||||
|
|
||||||
|
const browser_steps_config=JSON.parse('{{ browser_steps_config|tojson }}');
|
||||||
|
const browser_steps_start_url="{{url_for('browser_steps.browsersteps_start_session', uuid=uuid)}}";
|
||||||
|
const browser_steps_sync_url="{{url_for('browser_steps.browsersteps_ui_update', uuid=uuid)}}";
|
||||||
{% if emailprefix %}
|
{% if emailprefix %}
|
||||||
const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
|
const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
|
||||||
|
const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};
|
||||||
|
const recheck_proxy_start_url="{{url_for('check_proxies.start_check', uuid=uuid)}}";
|
||||||
|
const proxy_recheck_status_url="{{url_for('check_proxies.get_recheck_status', uuid=uuid)}}";
|
||||||
|
const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
|
||||||
|
const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}";
|
||||||
|
|
||||||
const browser_steps_config=JSON.parse('{{ browser_steps_config|tojson }}');
|
|
||||||
const browser_steps_sync_url="{{url_for('browser_steps.browsersteps_ui_update', uuid=uuid)}}";
|
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script>
|
<script src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script>
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='limit.js')}}" defer></script>
|
<script src="{{url_for('static_content', group='js', filename='limit.js')}}" defer></script>
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>
|
<script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='visual-selector.js')}}" defer></script>
|
<script src="{{url_for('static_content', group='js', filename='visual-selector.js')}}" defer></script>
|
||||||
{% if playwright_enabled %}
|
{% if playwright_enabled %}
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='browser-steps.js')}}" defer></script>
|
<script src="{{url_for('static_content', group='js', filename='browser-steps.js')}}" defer></script>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
|
<script src="{{url_for('static_content', group='js', filename='recheck-proxy.js')}}" defer></script>
|
||||||
|
|
||||||
<div class="edit-form monospaced-textarea">
|
<div class="edit-form monospaced-textarea">
|
||||||
|
|
||||||
<div class="tabs collapsable">
|
<div class="tabs collapsable">
|
||||||
@@ -50,7 +55,7 @@
|
|||||||
<div class="box-wrap inner">
|
<div class="box-wrap inner">
|
||||||
<form class="pure-form pure-form-stacked"
|
<form class="pure-form pure-form-stacked"
|
||||||
action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next'), unpause_on_save = request.args.get('unpause_on_save')) }}" method="POST">
|
action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next'), unpause_on_save = request.args.get('unpause_on_save')) }}" method="POST">
|
||||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||||
|
|
||||||
<div class="tab-pane-inner" id="general">
|
<div class="tab-pane-inner" id="general">
|
||||||
<fieldset>
|
<fieldset>
|
||||||
@@ -73,7 +78,7 @@
|
|||||||
{{ render_field(form.title, class="m-d") }}
|
{{ render_field(form.title, class="m-d") }}
|
||||||
</div>
|
</div>
|
||||||
<div class="pure-control-group">
|
<div class="pure-control-group">
|
||||||
{{ render_field(form.tag) }}
|
{{ render_field(form.tags) }}
|
||||||
<span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span>
|
<span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span>
|
||||||
</div>
|
</div>
|
||||||
<div class="pure-control-group">
|
<div class="pure-control-group">
|
||||||
@@ -109,7 +114,8 @@
|
|||||||
</div>
|
</div>
|
||||||
{% if form.proxy %}
|
{% if form.proxy %}
|
||||||
<div class="pure-control-group inline-radio">
|
<div class="pure-control-group inline-radio">
|
||||||
{{ render_field(form.proxy, class="fetch-backend-proxy") }}
|
<div>{{ form.proxy.label }} <a href="" id="check-all-proxies" class="pure-button button-secondary button-xsmall" >Check/Scan all</a></div>
|
||||||
|
<div>{{ form.proxy(class="fetch-backend-proxy") }}</div>
|
||||||
<span class="pure-form-message-inline">
|
<span class="pure-form-message-inline">
|
||||||
Choose a proxy for this watch
|
Choose a proxy for this watch
|
||||||
</span>
|
</span>
|
||||||
@@ -150,6 +156,17 @@
|
|||||||
{{ render_field(form.headers, rows=5, placeholder="Example
|
{{ render_field(form.headers, rows=5, placeholder="Example
|
||||||
Cookie: foobar
|
Cookie: foobar
|
||||||
User-Agent: wonderbra 1.0") }}
|
User-Agent: wonderbra 1.0") }}
|
||||||
|
|
||||||
|
<div class="pure-form-message-inline">
|
||||||
|
{% if has_extra_headers_file %}
|
||||||
|
<strong>Alert! Extra headers file found and will be added to this watch!</strong>
|
||||||
|
{% else %}
|
||||||
|
Headers can also be read from a file in your data directory ‐ <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Adding-headers-from-an-external-file">read more here</a>
|
||||||
|
{% endif %}
|
||||||
|
<br>
|
||||||
|
(Not supported by Selenium browser)
|
||||||
|
</div>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
<div class="pure-control-group" id="request-body">
|
<div class="pure-control-group" id="request-body">
|
||||||
{{ render_field(form.body, rows=5, placeholder="Example
|
{{ render_field(form.body, rows=5, placeholder="Example
|
||||||
@@ -163,7 +180,7 @@ User-Agent: wonderbra 1.0") }}
|
|||||||
</div>
|
</div>
|
||||||
{% if playwright_enabled %}
|
{% if playwright_enabled %}
|
||||||
<div class="tab-pane-inner" id="browser-steps">
|
<div class="tab-pane-inner" id="browser-steps">
|
||||||
<img class="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}">
|
<img class="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}" alt="New beta functionality">
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<div class="pure-control-group">
|
<div class="pure-control-group">
|
||||||
<!--
|
<!--
|
||||||
@@ -186,11 +203,12 @@ User-Agent: wonderbra 1.0") }}
|
|||||||
<span class="loader" >
|
<span class="loader" >
|
||||||
<span id="browsersteps-click-start">
|
<span id="browsersteps-click-start">
|
||||||
<h2 >Click here to Start</h2>
|
<h2 >Click here to Start</h2>
|
||||||
Please allow 10-15 seconds for the browser to connect.
|
<svg style="height: 3.5rem;" version="1.1" viewBox="0 0 32 32" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><g id="Layer_1"/><g id="play_x5F_alt"><path d="M16,0C7.164,0,0,7.164,0,16s7.164,16,16,16s16-7.164,16-16S24.836,0,16,0z M10,24V8l16.008,8L10,24z" style="fill: var(--color-grey-400);"/></g></svg><br>
|
||||||
|
Please allow 10-15 seconds for the browser to connect.<br>
|
||||||
</span>
|
</span>
|
||||||
<div class="spinner" style="display: none;"></div>
|
<div class="spinner" style="display: none;"></div>
|
||||||
</span>
|
</span>
|
||||||
<img class="noselect" id="browsersteps-img" src="" style="max-width: 100%; width: 100%;" />
|
<img class="noselect" id="browsersteps-img" src="" style="max-width: 100%; width: 100%;" >
|
||||||
<canvas class="noselect" id="browsersteps-selector-canvas" style="max-width: 100%; width: 100%;"></canvas>
|
<canvas class="noselect" id="browsersteps-selector-canvas" style="max-width: 100%; width: 100%;"></canvas>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -220,7 +238,7 @@ User-Agent: wonderbra 1.0") }}
|
|||||||
<div class="field-group" id="notification-field-group">
|
<div class="field-group" id="notification-field-group">
|
||||||
{% if has_default_notification_urls %}
|
{% if has_default_notification_urls %}
|
||||||
<div class="inline-warning">
|
<div class="inline-warning">
|
||||||
<img class="inline-warning-icon" src="{{url_for('static_content', group='images', filename='notice.svg')}}" alt="Look out!" title="Lookout!"/>
|
<img class="inline-warning-icon" src="{{url_for('static_content', group='images', filename='notice.svg')}}" alt="Look out!" title="Lookout!" >
|
||||||
There are <a href="{{ url_for('settings_page')}}#notifications">system-wide notification URLs enabled</a>, this form will override notification settings for this watch only ‐ an empty Notification URL list here will still send notifications.
|
There are <a href="{{ url_for('settings_page')}}#notifications">system-wide notification URLs enabled</a>, this form will override notification settings for this watch only ‐ an empty Notification URL list here will still send notifications.
|
||||||
</div>
|
</div>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
@@ -360,15 +378,16 @@ Unavailable") }}
|
|||||||
{{ render_field(form.extract_text, rows=5, placeholder="\d+ online") }}
|
{{ render_field(form.extract_text, rows=5, placeholder="\d+ online") }}
|
||||||
<span class="pure-form-message-inline">
|
<span class="pure-form-message-inline">
|
||||||
<ul>
|
<ul>
|
||||||
<li>Extracts text in the final output (line by line) after other filters using regular expressions;
|
<li>Extracts text in the final output (line by line) after other filters using regular expressions or string match;
|
||||||
<ul>
|
<ul>
|
||||||
<li>Regular expression ‐ example <code>/reports.+?2022/i</code></li>
|
<li>Regular expression ‐ example <code>/reports.+?2022/i</code></li>
|
||||||
|
<li>Don't forget to consider the white-space at the start of a line <code>/.+?reports.+?2022/i</code></li>
|
||||||
<li>Use <code>//(?aiLmsux))</code> type flags (more <a href="https://docs.python.org/3/library/re.html#index-15">information here</a>)<br></li>
|
<li>Use <code>//(?aiLmsux))</code> type flags (more <a href="https://docs.python.org/3/library/re.html#index-15">information here</a>)<br></li>
|
||||||
<li>Keyword example ‐ example <code>Out of stock</code></li>
|
<li>Keyword example ‐ example <code>Out of stock</code></li>
|
||||||
<li>Use groups to extract just that text ‐ example <code>/reports.+?(\d+)/i</code> returns a list of years only</li>
|
<li>Use groups to extract just that text ‐ example <code>/reports.+?(\d+)/i</code> returns a list of years only</li>
|
||||||
</ul>
|
</ul>
|
||||||
</li>
|
</li>
|
||||||
<li>One line per regular-expression/ string match</li>
|
<li>One line per regular-expression/string match</li>
|
||||||
</ul>
|
</ul>
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
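To make the "extract text" rules above concrete: each line of the field is either a /pattern/flags regular expression or a plain keyword, applied per line of the already-filtered text, and a capture group keeps only the captured part. A rough, simplified sketch of that matching (not the app's exact implementation):

import re

FLAGS = {'a': re.A, 'i': re.I, 'L': re.L, 'm': re.M, 's': re.S, 'u': re.U, 'x': re.X}

def apply_extract_rules(text, rules):
    out = []
    for line in text.splitlines():
        for rule in rules:
            m = re.match(r'^/(.+)/([aiLmsux]*)$', rule)  # "/pattern/flags" style rule
            if m:
                flags = 0
                for ch in m.group(2):
                    flags |= FLAGS[ch]
                found = re.search(m.group(1), line, flags)
                if found:
                    # A capture group keeps just the captured text, otherwise keep the line
                    out.append(found.group(1) if found.groups() else line)
            elif rule in line:  # plain keyword rule, e.g. "Out of stock"
                out.append(line)
    return out

text = "Annual reports for 2022\nOut of stock\nAnnual reports for 2021"
print(apply_extract_rules(text, [r'/reports.+?(\d+)/i', 'Out of stock']))
# ['2022', 'Out of stock', '2021']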
@@ -390,7 +409,7 @@ Unavailable") }}
|
|||||||
|
|
||||||
{% if watch['processor'] == 'text_json_diff' %}
|
{% if watch['processor'] == 'text_json_diff' %}
|
||||||
<div class="tab-pane-inner visual-selector-ui" id="visualselector">
|
<div class="tab-pane-inner visual-selector-ui" id="visualselector">
|
||||||
<img class="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}">
|
<img class="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}" alt="New beta functionality">
|
||||||
|
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<div class="pure-control-group">
|
<div class="pure-control-group">
|
||||||
@@ -407,7 +426,7 @@ Unavailable") }}
|
|||||||
<!-- request the screenshot and get the element offset info ready -->
|
<!-- request the screenshot and get the element offset info ready -->
|
||||||
<!-- use img src ready load to know everything is ready to map out -->
|
<!-- use img src ready load to know everything is ready to map out -->
|
||||||
<!-- @todo: maybe something interesting like a field to select 'elements that contain text... and their parents n' -->
|
<!-- @todo: maybe something interesting like a field to select 'elements that contain text... and their parents n' -->
|
||||||
<img id="selector-background" />
|
<img id="selector-background" >
|
||||||
<canvas id="selector-canvas"></canvas>
|
<canvas id="selector-canvas"></canvas>
|
||||||
</div>
|
</div>
|
||||||
<div id="selector-current-xpath" style="overflow-x: hidden"><strong>Currently:</strong> <span class="text">Loading...</span></div>
|
<div id="selector-current-xpath" style="overflow-x: hidden"><strong>Currently:</strong> <span class="text">Loading...</span></div>
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
{% extends 'base.html' %}
|
{% extends 'base.html' %}
|
||||||
{% block content %}
|
{% block content %}
|
||||||
{% from '_helpers.jinja' import render_field %}
|
{% from '_helpers.jinja' import render_field %}
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||||
<div class="edit-form monospaced-textarea">
|
<div class="edit-form monospaced-textarea">
|
||||||
|
|
||||||
<div class="tabs collapsable">
|
<div class="tabs collapsable">
|
||||||
@@ -13,7 +13,7 @@
|
|||||||
|
|
||||||
<div class="box-wrap inner">
|
<div class="box-wrap inner">
|
||||||
<form class="pure-form pure-form-aligned" action="{{url_for('import_page')}}" method="POST">
|
<form class="pure-form pure-form-aligned" action="{{url_for('import_page')}}" method="POST">
|
||||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||||
<div class="tab-pane-inner" id="url-list">
|
<div class="tab-pane-inner" id="url-list">
|
||||||
<legend>
|
<legend>
|
||||||
Enter one URL per line, and optionally add tags for each URL after a space, separated by commas
|
Enter one URL per line, and optionally add tags for each URL after a space, separated by commas
|
||||||
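For example, an import list could look like this (URLs and tag names are placeholders only):

https://example.com/page-to-watch tag-one, tag-two
https://example.com/another-page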
|
|||||||
@@ -4,13 +4,13 @@
|
|||||||
<div class="login-form">
|
<div class="login-form">
|
||||||
<div class="inner">
|
<div class="inner">
|
||||||
<form class="pure-form pure-form-stacked" action="{{url_for('login')}}" method="POST">
|
<form class="pure-form pure-form-stacked" action="{{url_for('login')}}" method="POST">
|
||||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<div class="pure-control-group">
|
<div class="pure-control-group">
|
||||||
<label for="password">Password</label>
|
<label for="password">Password</label>
|
||||||
<input type="password" id="password" required="" name="password" value=""
|
<input type="password" id="password" required="" name="password" value=""
|
||||||
size="15" autofocus />
|
size="15" autofocus />
|
||||||
<input type="hidden" id="email" name="email" value="defaultuser@changedetection.io" />
|
<input type="hidden" id="email" name="email" value="defaultuser@changedetection.io" >
|
||||||
</div>
|
</div>
|
||||||
<div class="pure-control-group">
|
<div class="pure-control-group">
|
||||||
<button type="submit" class="pure-button pure-button-primary">Login</button>
|
<button type="submit" class="pure-button pure-button-primary">Login</button>
|
||||||
|
|||||||
@@ -6,10 +6,11 @@
|
|||||||
{% if last_error_screenshot %}
|
{% if last_error_screenshot %}
|
||||||
const error_screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid, error_screenshot=1) }}";
|
const error_screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid, error_screenshot=1) }}";
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
const highlight_submit_ignore_url="{{url_for('highlight_submit_ignore_url', uuid=uuid)}}";
|
||||||
</script>
|
</script>
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script>
|
<script src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script>
|
||||||
|
|
||||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||||
<div class="tabs">
|
<div class="tabs">
|
||||||
<ul>
|
<ul>
|
||||||
{% if last_error_text %}<li class="tab" id="error-text-tab"><a href="#error-text">Error Text</a></li> {% endif %}
|
{% if last_error_text %}<li class="tab" id="error-text-tab"><a href="#error-text">Error Text</a></li> {% endif %}
|
||||||
@@ -20,7 +21,7 @@
|
|||||||
{% endif %}
|
{% endif %}
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
|
<form><input type="hidden" name="csrf_token" value="{{ csrf_token() }}"></form>
|
||||||
<div id="diff-ui">
|
<div id="diff-ui">
|
||||||
<div class="tab-pane-inner" id="error-text">
|
<div class="tab-pane-inner" id="error-text">
|
||||||
<div class="snapshot-age error">{{watch.error_text_ctime|format_seconds_ago}} seconds ago</div>
|
<div class="snapshot-age error">{{watch.error_text_ctime|format_seconds_ago}} seconds ago</div>
|
||||||
@@ -31,16 +32,17 @@
|
|||||||
|
|
||||||
<div class="tab-pane-inner" id="error-screenshot">
|
<div class="tab-pane-inner" id="error-screenshot">
|
||||||
<div class="snapshot-age error">{{watch.snapshot_error_screenshot_ctime|format_seconds_ago}} seconds ago</div>
|
<div class="snapshot-age error">{{watch.snapshot_error_screenshot_ctime|format_seconds_ago}} seconds ago</div>
|
||||||
<img id="error-screenshot-img" style="max-width: 80%" alt="Current erroring screenshot from most recent request"/>
|
<img id="error-screenshot-img" style="max-width: 80%" alt="Current erroring screenshot from most recent request" >
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="tab-pane-inner" id="text">
|
<div class="tab-pane-inner" id="text">
|
||||||
<div class="snapshot-age">{{watch.snapshot_text_ctime|format_timestamp_timeago}}</div>
|
<div class="snapshot-age">{{watch.snapshot_text_ctime|format_timestamp_timeago}}</div>
|
||||||
<span class="ignored">Grey lines are ignored</span> <span class="triggered">Blue lines are triggers</span>
|
<span class="ignored">Grey lines are ignored</span> <span class="triggered">Blue lines are triggers</span> <span class="tip"><strong>Pro-tip</strong>: Highlight text to add to ignore filters</span>
|
||||||
|
|
||||||
<table>
|
<table>
|
||||||
<tbody>
|
<tbody>
|
||||||
<tr>
|
<tr>
|
||||||
<td id="diff-col">
|
<td id="diff-col" class="highlightable-filter">
|
||||||
{% for row in content %}
|
{% for row in content %}
|
||||||
<div class="{{row.classes}}">{{row.line}}</div>
|
<div class="{{row.classes}}">{{row.line}}</div>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
@@ -58,7 +60,7 @@
|
|||||||
{% if is_html_webdriver %}
|
{% if is_html_webdriver %}
|
||||||
{% if screenshot %}
|
{% if screenshot %}
|
||||||
<div class="snapshot-age">{{watch.snapshot_screenshot_ctime|format_timestamp_timeago}}</div>
|
<div class="snapshot-age">{{watch.snapshot_screenshot_ctime|format_timestamp_timeago}}</div>
|
||||||
<img style="max-width: 80%" id="screenshot-img" alt="Current screenshot from most recent request"/>
|
<img style="max-width: 80%" id="screenshot-img" alt="Current screenshot from most recent request" >
|
||||||
{% else %}
|
{% else %}
|
||||||
No screenshot available just yet! Try rechecking the page.
|
No screenshot available just yet! Try rechecking the page.
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|||||||
@@ -9,10 +9,10 @@
 const email_notification_prefix=JSON.parse('{{emailprefix|tojson}}');
 {% endif %}
 </script>
-<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
-<script type="text/javascript" src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>
+<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
+<script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>

-<script type="text/javascript" src="{{url_for('static_content', group='js', filename='global-settings.js')}}" defer></script>
+<script src="{{url_for('static_content', group='js', filename='global-settings.js')}}" defer></script>
 <div class="edit-form">
 <div class="tabs collapsable">
 <ul>
@@ -26,7 +26,7 @@
 </div>
 <div class="box-wrap inner">
 <form class="pure-form pure-form-stacked settings" action="{{url_for('settings_page')}}" method="POST">
-<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
+<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
 <div class="tab-pane-inner" id="general">
 <fieldset>
 <div class="pure-control-group">
@@ -63,12 +63,8 @@
 </span>
 </div>
 <div class="pure-control-group">
-{{ render_field(form.application.form.base_url, placeholder="http://yoursite.com:5000/",
-class="m-d") }}
-<span class="pure-form-message-inline">
-Base URL used for the <code>{{ '{{ base_url }}' }}</code> token in notifications and RSS links.<br>Default value is the ENV var 'BASE_URL' (Currently "{{settings_application['current_base_url']}}"),
-<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Configurable-BASE_URL-setting">read more here</a>.
-</span>
+{{ render_field(form.application.form.pager_size) }}
+<span class="pure-form-message-inline">Number of items per page in the watch overview list, 0 to disable.</span>
 </div>

 <div class="pure-control-group">
@@ -96,6 +92,13 @@
 {{ render_common_settings_form(form.application.form, emailprefix, settings_application) }}
 </div>
 </fieldset>
+<div class="pure-control-group" id="notification-base-url">
+{{ render_field(form.application.form.base_url, class="m-d") }}
+<span class="pure-form-message-inline">
+Base URL used for the <code>{{ '{{ base_url }}' }}</code> token in notification links.<br>
+Default value is the system environment variable '<code>BASE_URL</code>' - <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Configurable-BASE_URL-setting">read more here</a>.
+</span>
+</div>
 </div>

 <div class="tab-pane-inner" id="fetching">
@@ -177,20 +180,56 @@ nav
 </div>
 </div>
 <div class="tab-pane-inner" id="proxies">
+<div id="recommended-proxy">
+<div>
+<img style="height: 2em;" src="{{url_for('static_content', group='images', filename='brightdata.svg')}}" alt="BrightData Proxy Provider">
+<p>BrightData offer world-class proxy services, "Data Center" proxies are a very affordable way to proxy your requests, whilst <strong><a href="https://brightdata.grsm.io/n0r16zf7eivq">WebUnlocker</a></strong> can help solve most CAPTCHAs.</p>
+<p>
+BrightData offer many <a href="https://brightdata.com/proxy-types" target="new">many different types of proxies</a>, it is worth reading about what is best for your use-case.
+</p>
+
-<p><strong>Tip</strong>: You can connect to websites using <a href="https://brightdata.grsm.io/n0r16zf7eivq">BrightData</a> proxies, their service <strong>WebUnlocker</strong> will solve most CAPTCHAs, whilst their <strong>Residential Proxies</strong> may help to avoid CAPTCHA altogether. </p>
-<p>It may be easier to try <strong>WebUnlocker</strong> first, WebUnlocker also supports country selection.</p>
+<p>
+When you have <a href="https://brightdata.grsm.io/n0r16zf7eivq">registered</a>, enabled the required services, visit the <A href="https://brightdata.com/cp/api_example?">API example page</A>, then select <strong>Python</strong>, set the country you wish to use, then copy+paste the access Proxy URL into the "Extra Proxies" boxes below.<br>
+</p>
+<p>
+The Proxy URL with BrightData should start with <code>http://brd-customer...</code>
+</p>
+<p>When you sign up using <a href="https://brightdata.grsm.io/n0r16zf7eivq">https://brightdata.grsm.io/n0r16zf7eivq</a> BrightData will match any first deposit up to $150</p>
+</div>
+<div>
+<img style="height: 2em;"
+src="{{url_for('static_content', group='images', filename='oxylabs.svg')}}"
+alt="Oxylabs Proxy Provider">
+<p>
+Collect public data at scale with industry-leading web scraping solutions and the world’s
+largest ethical proxy network.
+</p>
+<p>
+Oxylabs also provide a <a href="https://oxylabs.io/products/web-unblocker"><strong>WebUnlocker</strong></a>
+proxy that bypasses sophisticated anti-bot systems, so you don’t have to.<br>
+</p>
+<p>
+Serve over <a href="https://oxylabs.io/location-proxy">195 countries</a>, providing <a
+href="https://oxylabs.io/products/residential-proxy-pool">Residential</a>, <a
+href="https://oxylabs.io/products/mobile-proxies">Mobile</a> and <a
+href="https://oxylabs.io/products/rotating-isp-proxies">ISP proxies</a> and much more.
+</p>
+<p>
+Use the promo code <strong>boost35</strong> with this link <a href="https://oxylabs.go2cloud.org/SH2d">https://oxylabs.go2cloud.org/SH2d</a> for 35% off Residential, Mobile proxies, Web Unblocker, and Scraper APIs. Built-in proxies enable you to access data from all around the world and help overcome anti-bot solutions.
+
+</p>
+
+
+</div>
+</div>
 <p>
-When you have <a href="https://brightdata.grsm.io/n0r16zf7eivq">registered</a>, enabled the required services, visit the <A href="https://brightdata.com/cp/api_example?">API example page</A>, then select <strong>Python</strong>, set the country you wish to use, then copy+paste the example URL below<br>
-The Proxy URL with BrightData should start with <code>http://brd-customer...</code>
+Your proxy provider may need to whitelist our IP of <code>204.15.192.195</code>
 </p>
+<p><strong>Tip</strong>: "Residential" and "Mobile" proxy type can be more successfull than "Data Center" for blocked websites.
-<p>When you sign up using <a href="https://brightdata.grsm.io/n0r16zf7eivq">https://brightdata.grsm.io/n0r16zf7eivq</a> BrightData will match any first deposit up to $150</p>


 <div class="pure-control-group">
 {{ render_field(form.requests.form.extra_proxies) }}
 <span class="pure-form-message-inline">"Name" will be used for selecting the proxy in the Watch Edit settings</span>
 </div>
 </div>
 <div id="actions">

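Note on the "Extra Proxies" entries described in the proxies tab above: each entry is just a name plus a standard HTTP proxy URL, and the BrightData Python example URL can be pasted in as-is. A minimal sketch of fetching a page through such a URL follows; the hostname, port and brd-customer credentials are placeholders, not values taken from this repository.

# Minimal sketch, assuming a BrightData-style proxy URL pasted into "Extra Proxies".
# All credentials and the endpoint below are placeholders.
import requests

proxy_url = "http://brd-customer-EXAMPLE-zone-EXAMPLE:PASSWORD@example-proxy-host:22225"  # placeholder

response = requests.get(
    "https://example.com",
    proxies={"http": proxy_url, "https": proxy_url},  # route both schemes through the proxy
    timeout=30,
)
print(response.status_code)
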
changedetectionio/templates/svgs/search-icon.svg (new file, 1 line)
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="utf-8"?><svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 122.879 119.799" enable-background="new 0 0 122.879 119.799" xml:space="preserve"><g><path d="M49.988,0h0.016v0.007C63.803,0.011,76.298,5.608,85.34,14.652c9.027,9.031,14.619,21.515,14.628,35.303h0.007v0.033v0.04 h-0.007c-0.005,5.557-0.917,10.905-2.594,15.892c-0.281,0.837-0.575,1.641-0.877,2.409v0.007c-1.446,3.66-3.315,7.12-5.547,10.307 l29.082,26.139l0.018,0.016l0.157,0.146l0.011,0.011c1.642,1.563,2.536,3.656,2.649,5.78c0.11,2.1-0.543,4.248-1.979,5.971 l-0.011,0.016l-0.175,0.203l-0.035,0.035l-0.146,0.16l-0.016,0.021c-1.565,1.642-3.654,2.534-5.78,2.646 c-2.097,0.111-4.247-0.54-5.971-1.978l-0.015-0.011l-0.204-0.175l-0.029-0.024L78.761,90.865c-0.88,0.62-1.778,1.209-2.687,1.765 c-1.233,0.755-2.51,1.466-3.813,2.115c-6.699,3.342-14.269,5.222-22.272,5.222v0.007h-0.016v-0.007 c-13.799-0.004-26.296-5.601-35.338-14.645C5.605,76.291,0.016,63.805,0.007,50.021H0v-0.033v-0.016h0.007 c0.004-13.799,5.601-26.296,14.645-35.338C23.683,5.608,36.167,0.016,49.955,0.007V0H49.988L49.988,0z M50.004,11.21v0.007h-0.016 h-0.033V11.21c-10.686,0.007-20.372,4.35-27.384,11.359C15.56,29.578,11.213,39.274,11.21,49.973h0.007v0.016v0.033H11.21 c0.007,10.686,4.347,20.367,11.359,27.381c7.009,7.012,16.705,11.359,27.403,11.361v-0.007h0.016h0.033v0.007 c10.686-0.007,20.368-4.348,27.382-11.359c7.011-7.009,11.358-16.702,11.36-27.4h-0.006v-0.016v-0.033h0.006 c-0.006-10.686-4.35-20.372-11.358-27.384C70.396,15.56,60.703,11.213,50.004,11.21L50.004,11.21z"/></g></svg>

@@ -1,20 +1,19 @@
 {% extends 'base.html' %}
 {% block content %}
 {% from '_helpers.jinja' import render_simple_field, render_field %}
-{% from '_pagination.jinja' import pagination %}
-<script type="text/javascript" src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
-<script type="text/javascript" src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script>
+<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
+<script src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script>

 <div class="box">

 <form class="pure-form" action="{{ url_for('form_quick_watch_add') }}" method="POST" id="new-watch-form">
-<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
+<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
 <fieldset>
 <legend>Add a new change detection watch</legend>
 <div id="watch-add-wrapper-zone">
 <div>
 {{ render_simple_field(form.url, placeholder="https://...", required=true) }}
-{{ render_simple_field(form.tag, value=active_tag if active_tag else '', placeholder="watch label / tag") }}
+{{ render_simple_field(form.tags, value=tags[active_tag].title if active_tag else '', placeholder="watch label / tag") }}
 </div>
 <div>
 {{ render_simple_field(form.watch_submit_button, title="Watch this URL!" ) }}
@@ -26,41 +25,50 @@
 </div>

 </fieldset>
-<span style="color:#eee; font-size: 80%;"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread-white.svg')}}" /> Tip: You can also add 'shared' watches. <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Sharing-a-Watch">More info</a></a></span>
+<span style="color:#eee; font-size: 80%;"><img alt="Create a shareable link" style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread-white.svg')}}" > Tip: You can also add 'shared' watches. <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Sharing-a-Watch">More info</a></span>
 </form>

 <form class="pure-form" action="{{ url_for('form_watch_list_checkbox_operations') }}" method="POST" id="watch-list-form">
-<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
+<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
+<input type="hidden" id="op_extradata" name="op_extradata" value="" >
 <div id="checkbox-operations">
 <button class="pure-button button-secondary button-xsmall" name="op" value="pause">Pause</button>
 <button class="pure-button button-secondary button-xsmall" name="op" value="unpause">UnPause</button>
 <button class="pure-button button-secondary button-xsmall" name="op" value="mute">Mute</button>
 <button class="pure-button button-secondary button-xsmall" name="op" value="unmute">UnMute</button>
 <button class="pure-button button-secondary button-xsmall" name="op" value="recheck">Recheck</button>
+<button class="pure-button button-secondary button-xsmall" name="op" value="assign-tag" id="checkbox-assign-tag">Tag</button>
+<button class="pure-button button-secondary button-xsmall" name="op" value="mark-viewed">Mark viewed</button>
 <button class="pure-button button-secondary button-xsmall" name="op" value="notification-default">Use default notification</button>
-<button class="pure-button button-secondary button-xsmall" style="background: #dd4242; font-size: 70%" name="op" value="delete">Delete</button>
+<button class="pure-button button-secondary button-xsmall" style="background: #dd4242;" name="op" value="clear-history">Clear/reset history</button>
+<button class="pure-button button-secondary button-xsmall" style="background: #dd4242;" name="op" value="delete">Delete</button>
 </div>
+{% if watches|length >= pagination.per_page %}
+{{ pagination.info }}
+{% endif %}
+{% if search_q %}<div id="search-result-info">Searching "<strong><i>{{search_q}}</i></strong>"</div>{% endif %}
 <div>
 <a href="{{url_for('index')}}" class="pure-button button-tag {{'active' if not active_tag }}">All</a>
-{% for tag in tags %}
+{% for uuid, tag in tags.items() %}
 {% if tag != "" %}
-<a href="{{url_for('index', tag=tag) }}" class="pure-button button-tag {{'active' if active_tag == tag }}">{{ tag }}</a>
+<a href="{{url_for('index', tag=uuid) }}" class="pure-button button-tag {{'active' if active_tag == uuid }}">{{ tag.title }}</a>
 {% endif %}
 {% endfor %}
 </div>

-{% set sort_order = request.args.get('order', 'asc') == 'asc' %}
-{% set sort_attribute = request.args.get('sort', 'last_changed') %}
+{% set sort_order = sort_order or 'asc' %}
+{% set sort_attribute = sort_attribute or 'last_changed' %}
 {% set pagination_page = request.args.get('page', 0) %}

 <div id="watch-table-wrapper">

 <table class="pure-table pure-table-striped watch-table">
 <thead>
 <tr>
-<th><input style="vertical-align: middle" type="checkbox" id="check-all"/> #</th>
-<th></th>
-{% set link_order = "desc" if sort_order else "asc" %}
+{% set link_order = "desc" if sort_order == 'asc' else "asc" %}
 {% set arrow_span = "" %}
+<th><input style="vertical-align: middle" type="checkbox" id="check-all" > <a class="{{ 'active '+link_order if sort_attribute == 'date_created' else 'inactive' }}" href="{{url_for('index', sort='date_created', order=link_order, tag=active_tag)}}"># <span class='arrow {{link_order}}'></span></a></th>
+<th></th>
 <th><a class="{{ 'active '+link_order if sort_attribute == 'label' else 'inactive' }}" href="{{url_for('index', sort='label', order=link_order, tag=active_tag)}}">Website <span class='arrow {{link_order}}'></span></a></th>
 <th><a class="{{ 'active '+link_order if sort_attribute == 'last_checked' else 'inactive' }}" href="{{url_for('index', sort='last_checked', order=link_order, tag=active_tag)}}">Last Checked <span class='arrow {{link_order}}'></span></a></th>
 <th><a class="{{ 'active '+link_order if sort_attribute == 'last_changed' else 'inactive' }}" href="{{url_for('index', sort='last_changed', order=link_order, tag=active_tag)}}">Last Changed <span class='arrow {{link_order}}'></span></a></th>
@@ -68,13 +76,12 @@
 </tr>
 </thead>
 <tbody>
-{% set sorted_watches = watches|sort(attribute=sort_attribute, reverse=sort_order) %}
-{% for watch in sorted_watches %}
-{# WIP for pagination, disabled for now
-{% if not ( loop.index >= 3 and loop.index <=4) %}{% continue %}{% endif %} -->
-#}
+{% if not watches|length %}
+<tr>
+<td colspan="6">No website watches configured, please add a URL in the box above, or <a href="{{ url_for('import_page')}}" >import a list</a>.</td>
+</tr>
+{% endif %}
+{% for watch in (watches|sort(attribute=sort_attribute, reverse=sort_order == 'asc'))|pagination_slice(skip=pagination.skip) %}
 <tr id="{{ watch.uuid }}"
 class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }} processor-{{ watch['processor'] }}
 {% if watch.last_error is defined and watch.last_error != False %}error{% endif %}
@@ -82,26 +89,26 @@
 {% if watch.paused is defined and watch.paused != False %}paused{% endif %}
 {% if watch.newest_history_key| int > watch.last_viewed and watch.history_n>=2 %}unviewed{% endif %}
 {% if watch.uuid in queued_uuids %}queued{% endif %}">
-<td class="inline checkbox-uuid" ><input name="uuids" type="checkbox" value="{{ watch.uuid}} "/> <span>{{ loop.index }}</span></td>
+<td class="inline checkbox-uuid" ><input name="uuids" type="checkbox" value="{{ watch.uuid}} " > <span>{{ loop.index+pagination.skip }}</span></td>
 <td class="inline watch-controls">
 {% if not watch.paused %}
-<a class="state-off" href="{{url_for('index', op='pause', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause checks" title="Pause checks" class="icon icon-pause"/></a>
+<a class="state-off" href="{{url_for('index', op='pause', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause checks" title="Pause checks" class="icon icon-pause" ></a>
 {% else %}
-<a class="state-on" href="{{url_for('index', op='pause', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='play.svg')}}" alt="UnPause checks" title="UnPause checks" class="icon icon-unpause"/></a>
+<a class="state-on" href="{{url_for('index', op='pause', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='play.svg')}}" alt="UnPause checks" title="UnPause checks" class="icon icon-unpause" ></a>
 {% endif %}
-<a class="link-mute state-{{'on' if watch.notification_muted else 'off'}}" href="{{url_for('index', op='mute', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications" class="icon icon-mute"/></a>
+<a class="link-mute state-{{'on' if watch.notification_muted else 'off'}}" href="{{url_for('index', op='mute', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications" class="icon icon-mute" ></a>
 </td>
 <td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
 <a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}"></a>
-<a class="link-spread" href="{{url_for('form_share_put_watch', uuid=watch.uuid)}}"><img class="status-icon" src="{{url_for('static_content', group='images', filename='spread.svg')}}" class="status-icon icon icon-spread" title="Create a link to share watch config with others" /></a>
+<a class="link-spread" href="{{url_for('form_share_put_watch', uuid=watch.uuid)}}"><img src="{{url_for('static_content', group='images', filename='spread.svg')}}" class="status-icon icon icon-spread" title="Create a link to share watch config with others" ></a>

 {% if watch.get_fetch_backend == "html_webdriver"
 or ( watch.get_fetch_backend == "system" and system_default_fetcher == 'html_webdriver' )
 %}
-<img class="status-icon" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" title="Using a chrome browser" />
+<img class="status-icon" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" title="Using a chrome browser" >
 {% endif %}

-{%if watch.is_pdf %}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" title="Converting PDF to text" />{% endif %}
+{%if watch.is_pdf %}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" title="Converting PDF to text" >{% endif %}
 {% if watch.last_error is defined and watch.last_error != False %}
 <div class="fetch-error">{{ watch.last_error }}

@@ -112,6 +119,9 @@
 <a href="{{ url_for('settings_page', uuid=watch.uuid) }}#proxies">Try adding external proxies/locations</a>

 {% endif %}
+{% if 'empty result or contain only an image' in watch.last_error %}
+<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Detecting-changes-in-images">more help here</a>.
+{% endif %}
 </div>
 {% endif %}
 {% if watch.last_notification_error is defined and watch.last_notification_error != False %}
@@ -123,7 +133,7 @@
 <div class="ldjson-price-track-offer">Embedded price data detected, follow only price data? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
 {% endif %}
 {% if watch['track_ldjson_price_data'] == 'accepted' %}
-<span class="tracking-ldjson-price-data" title="Automatically following embedded price information"><img src="{{url_for('static_content', group='images', filename='price-tag-icon.svg')}}" class="status-icon price-follow-tag-icon"/> Price</span>
+<span class="tracking-ldjson-price-data" title="Automatically following embedded price information"><img src="{{url_for('static_content', group='images', filename='price-tag-icon.svg')}}" class="status-icon price-follow-tag-icon" > Price</span>
 {% endif %}
 {% endif %}

@@ -135,11 +145,14 @@
 {% else %}
 Not yet checked
 {% endif %}
+</span>
 {% endif %}

-{% if not active_tag %}
-<span class="watch-tag-list">{{ watch.tag}}</span>
-{% endif %}
+{% for watch_tag_uuid, watch_tag in datastore.get_all_tags_for_watch(watch['uuid']).items() %}
+<span class="watch-tag-list">{{ watch_tag.title }}</span>
+{% endfor %}

 </td>
 <td class="last-checked">{{watch|format_last_checked_time|safe}}</td>
 <td class="last-changed">{% if watch.history_n >=2 and watch.last_changed >0 %}
@@ -172,16 +185,13 @@
 {% endif %}
 <li>
 <a href="{{ url_for('form_watch_checknow', tag=active_tag) }}" class="pure-button button-tag ">Recheck
-all {% if active_tag%}in "{{active_tag}}"{%endif%}</a>
+all {% if active_tag%} in "{{tags[active_tag].title}}"{%endif%}</a>
 </li>
 <li>
 <a href="{{ url_for('rss', tag=active_tag , token=app_rss_token)}}"><img alt="RSS Feed" id="feed-icon" src="{{url_for('static_content', group='images', filename='Generic_Feed-icon.svg')}}" height="15"></a>
 </li>
 </ul>
-{# WIP for pagination, disabled for now
-{{ pagination(sorted_watches,3, pagination_page) }}
-#}
+{{ pagination.links }}

 </div>
 </form>
 </div>

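The watch-overview changes above move the template from plain string tags to UUID-keyed tag objects ({% for uuid, tag in tags.items() %}, tags[active_tag].title). A hedged sketch of the shape this implies, using placeholder values only; field names beyond 'title' are not taken from the repository.

# Hedged sketch of the tag structure the new template code appears to expect:
# a dict keyed by tag UUID, each entry carrying at least a 'title'.
tags = {
    "uuid-1": {"title": "products"},
    "uuid-2": {"title": "price drops"},
}

active_tag = "uuid-1"
label = tags[active_tag]["title"] if active_tag else "All"
print(label)  # -> products
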
@@ -14,13 +14,16 @@ global app

 def cleanup(datastore_path):
 # Unlink test output files
-files = ['output.txt',
-'url-watches.json',
-'secret.txt',
-'notification.txt',
-'count.txt',
-'endpoint-content.txt'
-]
+files = [
+'count.txt',
+'endpoint-content.txt'
+'headers.txt',
+'headers-testtag.txt',
+'notification.txt',
+'secret.txt',
+'url-watches.json',
+'output.txt',
+]
 for file in files:
 try:
 os.unlink("{}/{}".format(datastore_path, file))

@@ -28,7 +28,7 @@ def test_preferred_proxy(client, live_server):
 "fetch_backend": "html_requests",
 "headers": "",
 "proxy": "proxy-two",
-"tag": "",
+"tags": "",
 "url": url,
 },
 follow_redirects=True

changedetectionio/tests/proxy_list/test_noproxy.py (new file, 77 lines)
@@ -0,0 +1,77 @@
+#!/usr/bin/python3
+
+import time
+from flask import url_for
+from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
+
+
+def test_noproxy_option(client, live_server):
+    live_server_setup(live_server)
+    # Run by run_proxy_tests.sh
+    # Call this URL then scan the containers that it never went through them
+    url = "http://noproxy.changedetection.io"
+
+    # Should only be available when a proxy is setup
+    res = client.get(
+        url_for("edit_page", uuid="first", unpause_on_save=1))
+    assert b'No proxy' not in res.data
+
+    # Setup a proxy
+    res = client.post(
+        url_for("settings_page"),
+        data={
+            "requests-time_between_check-minutes": 180,
+            "application-ignore_whitespace": "y",
+            "application-fetch_backend": "html_requests",
+            "requests-extra_proxies-0-proxy_name": "custom-one-proxy",
+            "requests-extra_proxies-0-proxy_url": "http://test:awesome@squid-one:3128",
+            "requests-extra_proxies-1-proxy_name": "custom-two-proxy",
+            "requests-extra_proxies-1-proxy_url": "http://test:awesome@squid-two:3128",
+            "requests-extra_proxies-2-proxy_name": "custom-proxy",
+            "requests-extra_proxies-2-proxy_url": "http://test:awesome@squid-custom:3128",
+        },
+        follow_redirects=True
+    )
+
+    assert b"Settings updated." in res.data
+
+    # Should be available as an option
+    res = client.get(
+        url_for("settings_page", unpause_on_save=1))
+    assert b'No proxy' in res.data
+
+
+    # This will add it paused
+    res = client.post(
+        url_for("form_quick_watch_add"),
+        data={"url": url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
+        follow_redirects=True
+    )
+    assert b"Watch added in Paused state, saving will unpause" in res.data
+    uuid = extract_UUID_from_client(client)
+    res = client.get(
+        url_for("edit_page", uuid=uuid, unpause_on_save=1))
+    assert b'No proxy' in res.data
+
+    res = client.post(
+        url_for("edit_page", uuid=uuid, unpause_on_save=1),
+        data={
+            "include_filters": "",
+            "fetch_backend": "html_requests",
+            "headers": "",
+            "proxy": "no-proxy",
+            "tags": "",
+            "url": url,
+        },
+        follow_redirects=True
+    )
+    assert b"unpaused" in res.data
+    wait_for_all_checks(client)
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
+    wait_for_all_checks(client)
+    # Now the request should NOT appear in the second-squid logs (handled by the run_test_proxies.sh script)
+
+    # Prove that it actually checked
+
+    assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] != 0
+

@@ -77,7 +77,7 @@ def test_restock_detection(client, live_server):

 client.post(
 url_for("form_quick_watch_add"),
-data={"url": test_url, "tag": '', 'processor': 'restock_diff'},
+data={"url": test_url, "tags": '', 'processor': 'restock_diff'},
 follow_redirects=True
 )

changedetectionio/tests/smtp/smtp-test-server.py (new executable file, 42 lines)
@@ -0,0 +1,42 @@
+#!/usr/bin/python3
+import smtpd
+import asyncore
+
+# Accept a SMTP message and offer a way to retrieve the last message via TCP Socket
+
+last_received_message = b"Nothing"
+
+
+class CustomSMTPServer(smtpd.SMTPServer):
+
+    def process_message(self, peer, mailfrom, rcpttos, data, **kwargs):
+        global last_received_message
+        last_received_message = data
+        print('Receiving message from:', peer)
+        print('Message addressed from:', mailfrom)
+        print('Message addressed to :', rcpttos)
+        print('Message length :', len(data))
+        print(data.decode('utf8'))
+        return
+
+
+# Just print out the last message received on plain TCP socket server
+class EchoServer(asyncore.dispatcher):
+
+    def __init__(self, host, port):
+        asyncore.dispatcher.__init__(self)
+        self.create_socket()
+        self.set_reuse_addr()
+        self.bind((host, port))
+        self.listen(5)
+
+    def handle_accepted(self, sock, addr):
+        global last_received_message
+        print('Incoming connection from %s' % repr(addr))
+        sock.send(last_received_message)
+        last_received_message = b''
+
+
+server = CustomSMTPServer(('0.0.0.0', 11025), None)  # SMTP mail goes here
+server2 = EchoServer('0.0.0.0', 11080)  # Echo back last message received
+asyncore.loop()

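The helper above accepts SMTP on port 11025 and hands the most recently received message back over a plain TCP socket on port 11080. As a short usage sketch (the host and buffer size are assumptions, mirroring what the test file that follows does):

# Read the last mail the test SMTP server received, via its TCP echo port.
import socket

def read_last_mail(host="localhost", port=11080):  # host is an assumption
    s = socket.socket()
    s.connect((host, port))
    data = s.recv(50024).decode()  # the echo server sends back the raw SMTP payload
    s.close()
    return data
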
changedetectionio/tests/smtp/test_notification_smtp.py (new file, 165 lines)
@@ -0,0 +1,165 @@
+import json
+import os
+import time
+import re
+from flask import url_for
+from changedetectionio.tests.util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup, \
+    wait_for_all_checks, \
+    set_longer_modified_response
+from changedetectionio.tests.util import extract_UUID_from_client
+import logging
+import base64
+
+# NOTE - RELIES ON mailserver as hostname running, see github build recipes
+smtp_test_server = 'mailserver'
+
+from changedetectionio.notification import (
+    default_notification_body,
+    default_notification_format,
+    default_notification_title,
+    valid_notification_formats,
+)
+
+def test_setup(live_server):
+    live_server_setup(live_server)
+
+def get_last_message_from_smtp_server():
+    import socket
+    global smtp_test_server
+    port = 11080  # socket server port number
+
+    client_socket = socket.socket()  # instantiate
+    client_socket.connect((smtp_test_server, port))  # connect to the server
+
+    data = client_socket.recv(50024).decode()  # receive response
+    client_socket.close()  # close the connection
+    return data
+
+
+# Requires running the test SMTP server
+
+def test_check_notification_email_formats_default_HTML(client, live_server):
+    # live_server_setup(live_server)
+    set_original_response()
+
+    global smtp_test_server
+    notification_url = f'mailto://changedetection@{smtp_test_server}:11025/?to=fff@home.com'
+
+    #####################
+    # Set this up for when we remove the notification from the watch, it should fallback with these details
+    res = client.post(
+        url_for("settings_page"),
+        data={"application-notification_urls": notification_url,
+              "application-notification_title": "fallback-title " + default_notification_title,
+              "application-notification_body": "fallback-body<br> " + default_notification_body,
+              "application-notification_format": 'HTML',
+              "requests-time_between_check-minutes": 180,
+              'application-fetch_backend': "html_requests"},
+        follow_redirects=True
+    )
+    assert b"Settings updated." in res.data
+
+    # Add a watch and trigger a HTTP POST
+    test_url = url_for('test_endpoint', _external=True)
+    res = client.post(
+        url_for("form_quick_watch_add"),
+        data={"url": test_url, "tags": 'nice one'},
+        follow_redirects=True
+    )
+
+    assert b"Watch added" in res.data
+
+    wait_for_all_checks(client)
+    set_longer_modified_response()
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
+    wait_for_all_checks(client)
+
+    time.sleep(3)
+
+    msg = get_last_message_from_smtp_server()
+    assert len(msg) >= 1
+
+    # The email should have two bodies, and the text/html part should be <br>
+    assert 'Content-Type: text/plain' in msg
+    assert '(added) So let\'s see what happens.\n' in msg  # The plaintext part with \n
+    assert 'Content-Type: text/html' in msg
+    assert '(added) So let\'s see what happens.<br>' in msg  # the html part
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
+
+def test_check_notification_email_formats_default_Text_override_HTML(client, live_server):
+    # live_server_setup(live_server)
+
+    # HTML problems? see this
+    # https://github.com/caronc/apprise/issues/633
+
+    set_original_response()
+    global smtp_test_server
+    notification_url = f'mailto://changedetection@{smtp_test_server}:11025/?to=fff@home.com'
+
+    #####################
+    # Set this up for when we remove the notification from the watch, it should fallback with these details
+    res = client.post(
+        url_for("settings_page"),
+        data={"application-notification_urls": notification_url,
+              "application-notification_title": "fallback-title " + default_notification_title,
+              "application-notification_body": default_notification_body,
+              "application-notification_format": 'Text',
+              "requests-time_between_check-minutes": 180,
+              'application-fetch_backend': "html_requests"},
+        follow_redirects=True
+    )
+    assert b"Settings updated." in res.data
+
+    # Add a watch and trigger a HTTP POST
+    test_url = url_for('test_endpoint', _external=True)
+    res = client.post(
+        url_for("form_quick_watch_add"),
+        data={"url": test_url, "tags": 'nice one'},
+        follow_redirects=True
+    )
+
+    assert b"Watch added" in res.data
+
+    wait_for_all_checks(client)
+    set_longer_modified_response()
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
+    wait_for_all_checks(client)
+
+    time.sleep(3)
+    msg = get_last_message_from_smtp_server()
+    assert len(msg) >= 1
+    # with open('/tmp/m.txt', 'w') as f:
+    #     f.write(msg)
+
+    # The email should not have two bodies, should be TEXT only
+
+    assert 'Content-Type: text/plain' in msg
+    assert '(added) So let\'s see what happens.\n' in msg  # The plaintext part with \n
+
+    set_original_response()
+    # Now override as HTML format
+    res = client.post(
+        url_for("edit_page", uuid="first"),
+        data={
+            "url": test_url,
+            "notification_format": 'HTML',
+            'fetch_backend': "html_requests"},
+        follow_redirects=True
+    )
+    assert b"Updated watch." in res.data
+    wait_for_all_checks(client)
+
+    time.sleep(3)
+    msg = get_last_message_from_smtp_server()
+    assert len(msg) >= 1
+
+    # The email should have two bodies, and the text/html part should be <br>
+    assert 'Content-Type: text/plain' in msg
+    assert '(removed) So let\'s see what happens.\n' in msg  # The plaintext part with \n
+    assert 'Content-Type: text/html' in msg
+    assert '(removed) So let\'s see what happens.<br>' in msg  # the html part
+
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data

@@ -45,6 +45,15 @@ def test_check_access_control(app, client, live_server):
 res = client.get(url_for("diff_history_page", uuid="first"))
 assert b'Random content' in res.data

+# Check wrong password does not let us in
+res = c.post(
+url_for("login"),
+data={"password": "WRONG PASSWORD"},
+follow_redirects=True
+)
+
+assert b"LOG OUT" not in res.data
+assert b"Incorrect password" in res.data


 # Menu should not be available yet

|
|||||||
|
|
||||||
import time
|
import time
|
||||||
from flask import url_for
|
from flask import url_for
|
||||||
from .util import live_server_setup
|
from .util import live_server_setup, wait_for_all_checks
|
||||||
from changedetectionio import html_tools
|
from changedetectionio import html_tools
|
||||||
|
|
||||||
|
|
||||||
@@ -39,7 +39,6 @@ def test_setup(client, live_server):
|
|||||||
live_server_setup(live_server)
|
live_server_setup(live_server)
|
||||||
|
|
||||||
def test_check_removed_line_contains_trigger(client, live_server):
|
def test_check_removed_line_contains_trigger(client, live_server):
|
||||||
sleep_time_for_fetch_thread = 3
|
|
||||||
|
|
||||||
# Give the endpoint time to spin up
|
# Give the endpoint time to spin up
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
@@ -54,7 +53,7 @@ def test_check_removed_line_contains_trigger(client, live_server):
|
|||||||
assert b"1 Imported" in res.data
|
assert b"1 Imported" in res.data
|
||||||
|
|
||||||
# Give the thread time to pick it up
|
# Give the thread time to pick it up
|
||||||
time.sleep(sleep_time_for_fetch_thread)
|
wait_for_all_checks(client)
|
||||||
|
|
||||||
# Goto the edit page, add our ignore text
|
# Goto the edit page, add our ignore text
|
||||||
# Add our URL to the import page
|
# Add our URL to the import page
|
||||||
@@ -67,20 +66,20 @@ def test_check_removed_line_contains_trigger(client, live_server):
|
|||||||
follow_redirects=True
|
follow_redirects=True
|
||||||
)
|
)
|
||||||
assert b"Updated watch." in res.data
|
assert b"Updated watch." in res.data
|
||||||
time.sleep(sleep_time_for_fetch_thread)
|
wait_for_all_checks(client)
|
||||||
set_original(excluding='Something irrelevant')
|
set_original(excluding='Something irrelevant')
|
||||||
|
|
||||||
# A line thats not the trigger should not trigger anything
|
# A line thats not the trigger should not trigger anything
|
||||||
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||||
assert b'1 watches queued for rechecking.' in res.data
|
assert b'1 watches queued for rechecking.' in res.data
|
||||||
time.sleep(sleep_time_for_fetch_thread)
|
wait_for_all_checks(client)
|
||||||
res = client.get(url_for("index"))
|
res = client.get(url_for("index"))
|
||||||
assert b'unviewed' not in res.data
|
assert b'unviewed' not in res.data
|
||||||
|
|
||||||
# The trigger line is REMOVED, this should trigger
|
# The trigger line is REMOVED, this should trigger
|
||||||
set_original(excluding='The golden line')
|
set_original(excluding='The golden line')
|
||||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||||
time.sleep(sleep_time_for_fetch_thread)
|
wait_for_all_checks(client)
|
||||||
res = client.get(url_for("index"))
|
res = client.get(url_for("index"))
|
||||||
assert b'unviewed' in res.data
|
assert b'unviewed' in res.data
|
||||||
|
|
||||||
@@ -89,14 +88,14 @@ def test_check_removed_line_contains_trigger(client, live_server):
|
|||||||
client.get(url_for("mark_all_viewed"), follow_redirects=True)
|
client.get(url_for("mark_all_viewed"), follow_redirects=True)
|
||||||
set_original(excluding=None)
|
set_original(excluding=None)
|
||||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||||
time.sleep(sleep_time_for_fetch_thread)
|
wait_for_all_checks(client)
|
||||||
res = client.get(url_for("index"))
|
res = client.get(url_for("index"))
|
||||||
assert b'unviewed' not in res.data
|
assert b'unviewed' not in res.data
|
||||||
|
|
||||||
# Remove it again, and we should get a trigger
|
# Remove it again, and we should get a trigger
|
||||||
set_original(excluding='The golden line')
|
set_original(excluding='The golden line')
|
||||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||||
time.sleep(sleep_time_for_fetch_thread)
|
wait_for_all_checks(client)
|
||||||
res = client.get(url_for("index"))
|
res = client.get(url_for("index"))
|
||||||
assert b'unviewed' in res.data
|
assert b'unviewed' in res.data
|
||||||
|
|
||||||
@@ -105,8 +104,7 @@ def test_check_removed_line_contains_trigger(client, live_server):
|
|||||||
|
|
||||||
|
|
||||||
def test_check_add_line_contains_trigger(client, live_server):
|
def test_check_add_line_contains_trigger(client, live_server):
|
||||||
|
#live_server_setup(live_server)
|
||||||
sleep_time_for_fetch_thread = 3
|
|
||||||
|
|
||||||
# Give the endpoint time to spin up
|
# Give the endpoint time to spin up
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
@@ -136,8 +134,7 @@ def test_check_add_line_contains_trigger(client, live_server):
|
|||||||
assert b"1 Imported" in res.data
|
assert b"1 Imported" in res.data
|
||||||
|
|
||||||
# Give the thread time to pick it up
|
# Give the thread time to pick it up
|
||||||
time.sleep(sleep_time_for_fetch_thread)
|
wait_for_all_checks(client)
|
||||||
|
|
||||||
# Goto the edit page, add our ignore text
|
# Goto the edit page, add our ignore text
|
||||||
# Add our URL to the import page
|
# Add our URL to the import page
|
||||||
res = client.post(
|
res = client.post(
|
||||||
@@ -150,23 +147,25 @@ def test_check_add_line_contains_trigger(client, live_server):
|
|||||||
follow_redirects=True
|
follow_redirects=True
|
||||||
)
|
)
|
||||||
assert b"Updated watch." in res.data
|
assert b"Updated watch." in res.data
|
||||||
time.sleep(sleep_time_for_fetch_thread)
|
wait_for_all_checks(client)
|
||||||
set_original(excluding='Something irrelevant')
|
set_original(excluding='Something irrelevant')
|
||||||
|
|
||||||
# A line thats not the trigger should not trigger anything
|
# A line thats not the trigger should not trigger anything
|
||||||
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||||
assert b'1 watches queued for rechecking.' in res.data
|
assert b'1 watches queued for rechecking.' in res.data
|
||||||
time.sleep(sleep_time_for_fetch_thread)
|
wait_for_all_checks(client)
|
||||||
res = client.get(url_for("index"))
|
res = client.get(url_for("index"))
|
||||||
assert b'unviewed' not in res.data
|
assert b'unviewed' not in res.data
|
||||||
|
|
||||||
# The trigger line is ADDED, this should trigger
|
# The trigger line is ADDED, this should trigger
|
||||||
set_original(add_line='<p>Oh yes please</p>')
|
set_original(add_line='<p>Oh yes please</p>')
|
||||||
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
client.get(url_for("form_watch_checknow"), follow_redirects=True)
|
||||||
time.sleep(sleep_time_for_fetch_thread)
|
wait_for_all_checks(client)
|
||||||
res = client.get(url_for("index"))
|
res = client.get(url_for("index"))
|
||||||
assert b'unviewed' in res.data
|
assert b'unviewed' in res.data
|
||||||
|
|
||||||
|
# Takes a moment for apprise to fire
|
||||||
|
time.sleep(3)
|
||||||
with open("test-datastore/notification.txt", 'r') as f:
|
with open("test-datastore/notification.txt", 'r') as f:
|
||||||
response= f.read()
|
response= f.read()
|
||||||
assert '-Oh yes please-' in response
|
assert '-Oh yes please-' in response
|
||||||
|
|||||||
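These hunks replace fixed time.sleep() pauses with wait_for_all_checks(client); the project's own helper is imported from tests/util in the hunks above. As a rough illustration only, such a helper typically polls until nothing is left queued or being checked, along these lines (the marker text and page path are assumptions, not the repository's implementation):

# Illustrative only - not the repository's implementation of wait_for_all_checks().
import time

def wait_until_idle(client, timeout=60):
    deadline = time.time() + timeout
    while time.time() < deadline:
        res = client.get("/")                    # watch overview page via the Flask test client
        if b"Checking now" not in res.data:      # the marker text is an assumption
            return
        time.sleep(0.5)
    raise TimeoutError("watches still being checked after {} seconds".format(timeout))
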
@@ -2,7 +2,7 @@

 import time
 from flask import url_for
-from .util import live_server_setup, extract_api_key_from_UI
+from .util import live_server_setup, extract_api_key_from_UI, wait_for_all_checks

 import json
 import uuid
@@ -57,6 +57,7 @@ def test_setup(client, live_server):
 live_server_setup(live_server)

 def test_api_simple(client, live_server):
+#live_server_setup(live_server)

 api_key = extract_api_key_from_UI(client)

@@ -86,7 +87,7 @@ def test_api_simple(client, live_server):
 watch_uuid = res.json.get('uuid')
 assert res.status_code == 201

-time.sleep(3)
+wait_for_all_checks(client)

 # Verify its in the list and that recheck worked
 res = client.get(
@@ -107,7 +108,7 @@ def test_api_simple(client, live_server):
 )
 assert len(res.json) == 0

-time.sleep(2)
+wait_for_all_checks(client)

 set_modified_response()
 # Trigger recheck of all ?recheck_all=1
@@ -115,7 +116,7 @@ def test_api_simple(client, live_server):
 url_for("createwatch", recheck_all='1'),
 headers={'x-api-key': api_key},
 )
-time.sleep(3)
+wait_for_all_checks(client)

 # Did the recheck fire?
 res = client.get(
@@ -266,7 +267,7 @@ def test_api_watch_PUT_update(client, live_server):

 #live_server_setup(live_server)
 api_key = extract_api_key_from_UI(client)
-time.sleep(1)
 # Create a watch
 set_original_response()
 test_url = url_for('test_endpoint', _external=True,
@@ -282,7 +283,6 @@ def test_api_watch_PUT_update(client, live_server):

 assert res.status_code == 201

-time.sleep(1)

 # Get a listing, it will be the first one
 res = client.get(
@@ -297,6 +297,8 @@ def test_api_watch_PUT_update(client, live_server):
 url_for("edit_page", uuid=watch_uuid),
 )
 assert b"cookie: yum" in res.data, "'cookie: yum' found in 'headers' section"
+assert b"One" in res.data, "Tag 'One' was found"
+assert b"Two" in res.data, "Tag 'Two' was found"

 # HTTP PUT ( UPDATE an existing watch )
 res = client.put(
@@ -319,7 +321,8 @@ def test_api_watch_PUT_update(client, live_server):
 )
 assert b"new title" in res.data, "new title found in edit page"
 assert b"552" in res.data, "552 minutes found in edit page"
-assert b"One, Two" in res.data, "Tag 'One, Two' was found"
+assert b"One" in res.data, "Tag 'One' was found"
+assert b"Two" in res.data, "Tag 'Two' was found"
 assert b"cookie: all eaten" in res.data, "'cookie: all eaten' found in 'headers' section"

 ######################################################

@@ -24,7 +24,7 @@ def test_basic_auth(client, live_server):
 # Check form validation
 res = client.post(
 url_for("edit_page", uuid="first"),
-data={"include_filters": "", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
 follow_redirects=True
 )
 assert b"Updated watch." in res.data

@@ -2,7 +2,8 @@

 import time
 from flask import url_for
-from .util import live_server_setup, extract_UUID_from_client, extract_api_key_from_UI
+from .util import live_server_setup, extract_UUID_from_client, extract_api_key_from_UI, wait_for_all_checks


 def set_response_with_ldjson():
 test_return_data = """<html>
@@ -27,7 +28,7 @@ def set_response_with_ldjson():
 "description":"You dont need it",
 "mpn":"111111",
 "sku":"22222",
-"offers":{
+"Offers":{
 "@type":"AggregateOffer",
 "lowPrice":8097000,
 "highPrice":8099900,
@@ -75,12 +76,11 @@ def set_response_without_ldjson():
 f.write(test_return_data)
 return None

-# actually only really used by the distll.io importer, but could be handy too
-def test_check_ldjson_price_autodetect(client, live_server):
+def test_setup(client, live_server):
 live_server_setup(live_server)

-# Give the endpoint time to spin up
-time.sleep(1)
+# actually only really used by the distll.io importer, but could be handy too
+def test_check_ldjson_price_autodetect(client, live_server):

 set_response_with_ldjson()

@@ -92,7 +92,7 @@ def test_check_ldjson_price_autodetect(client, live_server):
 follow_redirects=True
 )
 assert b"1 Imported" in res.data
-time.sleep(3)
+wait_for_all_checks(client)

 # Should get a notice that it's available
 res = client.get(url_for("index"))
@@ -102,11 +102,11 @@ def test_check_ldjson_price_autodetect(client, live_server):
 uuid = extract_UUID_from_client(client)

 client.get(url_for('price_data_follower.accept', uuid=uuid, follow_redirects=True))
-time.sleep(2)
+wait_for_all_checks(client)

 # Trigger a check
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(2)
+wait_for_all_checks(client)
 # Offer should be gone
 res = client.get(url_for("index"))
 assert b'Embedded price data' not in res.data
@@ -138,9 +138,97 @@ def test_check_ldjson_price_autodetect(client, live_server):
 follow_redirects=True
 )
 assert b"1 Imported" in res.data
-time.sleep(3)
+wait_for_all_checks(client)
 res = client.get(url_for("index"))
 assert b'ldjson-price-track-offer' not in res.data

 ##########################################################################################
 client.get(url_for("form_delete", uuid="all"), follow_redirects=True)

+
+def _test_runner_check_bad_format_ignored(live_server, client, has_ldjson_price_data):
+
+test_url = url_for('test_endpoint', _external=True)
+res = client.post(
+url_for("import_page"),
+data={"urls": test_url},
+follow_redirects=True
+)
+assert b"1 Imported" in res.data
+wait_for_all_checks(client)
+
+for k,v in client.application.config.get('DATASTORE').data['watching'].items():
+assert v.get('last_error') == False
+assert v.get('has_ldjson_price_data') == has_ldjson_price_data
+
+
+##########################################################################################
+client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+
+
+def test_bad_ldjson_is_correctly_ignored(client, live_server):
+#live_server_setup(live_server)
+test_return_data = """
+<html>
+<head>
+<script type="application/ld+json">
+{
+"@context": "http://schema.org",
+"@type": ["Product", "SubType"],
+"name": "My test product",
+"description": "",
+"offers": {
+"note" : "You can see the case-insensitive OffERS key, it should work",
+"@type": "Offer",
+"offeredBy": {
+"@type": "Organization",
+"name":"Person",
+"telephone":"+1 999 999 999"
+},
+"price": "1",
+"priceCurrency": "EUR",
+"url": "/some/url"
+}
+}
+</script>
+</head>
+<body>
+<div class="yes">Some extra stuff</div>
+</body></html>
+"""
+with open("test-datastore/endpoint-content.txt", "w") as f:
+f.write(test_return_data)
+
+_test_runner_check_bad_format_ignored(live_server=live_server, client=client, has_ldjson_price_data=True)
+test_return_data = """
+<html>
+<head>
+<script type="application/ld+json">
+{
+"@context": "http://schema.org",
+"@type": ["Product", "SubType"],
+"name": "My test product",
+"description": "",
+"BrokenOffers": {
+"@type": "Offer",
+"offeredBy": {
+"@type": "Organization",
+"name":"Person",
+"telephone":"+1 999 999 999"
+},
+"price": "1",
+"priceCurrency": "EUR",
+"url": "/some/url"
+}
+}
+</script>
+</head>
+<body>
+<div class="yes">Some extra stuff</div>
+</body></html>
+"""
+with open("test-datastore/endpoint-content.txt", "w") as f:
+f.write(test_return_data)
+
+_test_runner_check_bad_format_ignored(live_server=live_server, client=client, has_ldjson_price_data=False)

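The fixture change from "offers" to "Offers", together with the "case-insensitive OffERS key" note in test_bad_ldjson_is_correctly_ignored, exercises case-insensitive handling of the JSON-LD offers key. A rough illustration of that kind of lookup (not the project's actual parser) might be:

    import json

    def get_itemprop_case_insensitive(ldjson_text, wanted_key="offers"):
        """Return the value of a top-level JSON-LD key, ignoring key case (offers/Offers/OffERS)."""
        data = json.loads(ldjson_text)
        for key, value in data.items():
            if key.lower() == wanted_key.lower():
                return value
        return None

    # Example: the capitalised spelling still resolves to the offer block
    sample = '{"@type": "Product", "Offers": {"@type": "AggregateOffer", "lowPrice": 8097000}}'
    assert get_itemprop_case_insensitive(sample)["lowPrice"] == 8097000
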
@@ -1,6 +1,6 @@
 #!/usr/bin/python3

-from .util import set_original_response, set_modified_response, live_server_setup
+from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks
 from flask import url_for
 from urllib.request import urlopen
 from zipfile import ZipFile
@@ -19,12 +19,12 @@ def test_backup(client, live_server):
 # Add our URL to the import page
 res = client.post(
 url_for("import_page"),
-data={"urls": url_for('test_endpoint', _external=True)},
+data={"urls": url_for('test_endpoint', _external=True)+"?somechar=őőőőőőőő"},
 follow_redirects=True
 )

 assert b"1 Imported" in res.data
-time.sleep(3)
+wait_for_all_checks(client)

 res = client.get(
 url_for("get_backup"),

@@ -2,7 +2,7 @@

 import time
 from flask import url_for
-from . util import live_server_setup
+from .util import live_server_setup, wait_for_all_checks
 from changedetectionio import html_tools

 def set_original_ignore_response():
@@ -61,7 +61,7 @@ def set_modified_response_minus_block_text():


 def test_check_block_changedetection_text_NOT_present(client, live_server):
-sleep_time_for_fetch_thread = 3
 live_server_setup(live_server)
 # Use a mix of case in ZzZ to prove it works case-insensitive.
 ignore_text = "out of stoCk\r\nfoobar"
@@ -81,7 +81,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
 assert b"1 Imported" in res.data

 # Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

 # Goto the edit page, add our ignore text
 # Add our URL to the import page
@@ -96,7 +96,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
 assert b"Updated watch." in res.data

 # Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
 # Check it saved
 res = client.get(
 url_for("edit_page", uuid="first"),
@@ -107,7 +107,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
 client.get(url_for("form_watch_checknow"), follow_redirects=True)

 # Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

 # It should report nothing found (no new 'unviewed' class)
 res = client.get(url_for("index"))
@@ -120,7 +120,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
 # Trigger a check
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
 # Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

 # It should report nothing found (no new 'unviewed' class)
 res = client.get(url_for("index"))
@@ -131,7 +131,7 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
 # Now we set a change where the text is gone, it should now trigger
 set_modified_response_minus_block_text()
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)
 res = client.get(url_for("index"))
 assert b'unviewed' in res.data

@@ -2,7 +2,7 @@

 import time
 from flask import url_for
-from . util import live_server_setup
+from .util import live_server_setup, wait_for_all_checks

 from ..html_tools import *

@@ -96,7 +96,7 @@ def test_check_markup_include_filters_restriction(client, live_server):
 # Add our URL to the import page
 res = client.post(
 url_for("edit_page", uuid="first"),
-data={"include_filters": include_filters, "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+data={"include_filters": include_filters, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
 follow_redirects=True
 )
 assert b"Updated watch." in res.data
@@ -157,7 +157,7 @@ def test_check_multiple_filters(client, live_server):
 url_for("edit_page", uuid="first"),
 data={"include_filters": include_filters,
 "url": test_url,
-"tag": "",
+"tags": "",
 "headers": "",
 'fetch_backend': "html_requests"},
 follow_redirects=True
@@ -176,3 +176,77 @@ def test_check_multiple_filters(client, live_server):
 assert b"Blob A" in res.data # CSS was ok
 assert b"Blob B" in res.data # xPath was ok
 assert b"Blob C" not in res.data # Should not be included
+
+# The filter exists, but did not contain anything useful
+# Mainly used when the filter contains just an IMG, this can happen when someone selects an image in the visual-selector
+# Tests fetcher can throw a "ReplyWithContentButNoText" exception after applying filter and extracting text
+def test_filter_is_empty_help_suggestion(client, live_server):
+#live_server_setup(live_server)
+
+include_filters = "#blob-a"
+
+with open("test-datastore/endpoint-content.txt", "w") as f:
+f.write("""<html><body>
+<div id="blob-a">
+<img src="something.jpg">
+</div>
+</body>
+</html>
+""")
+
+
+# Add our URL to the import page
+test_url = url_for('test_endpoint', _external=True)
+res = client.post(
+url_for("import_page"),
+data={"urls": test_url},
+follow_redirects=True
+)
+assert b"1 Imported" in res.data
+wait_for_all_checks(client)
+
+# Goto the edit page, add our ignore text
+# Add our URL to the import page
+res = client.post(
+url_for("edit_page", uuid="first"),
+data={"include_filters": include_filters,
+"url": test_url,
+"tags": "",
+"headers": "",
+'fetch_backend': "html_requests"},
+follow_redirects=True
+)
+assert b"Updated watch." in res.data
+
+wait_for_all_checks(client)
+
+
+res = client.get(
+url_for("index"),
+follow_redirects=True
+)
+
+assert b'empty result or contain only an image' in res.data
+
+
+### Just an empty selector, no image
+
+with open("test-datastore/endpoint-content.txt", "w") as f:
+f.write("""<html><body>
+<div id="blob-a">
+<!-- doo doo -->
+</div>
+</body>
+</html>
+""")
+
+res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
+wait_for_all_checks(client)
+
+res = client.get(
+url_for("index"),
+follow_redirects=True
+)
+
+assert b'empty result or contain only an image' not in res.data
+assert b'but contained no usable text' in res.data

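The new test_filter_is_empty_help_suggestion test expects the fetcher to raise a "ReplyWithContentButNoText"-style error when the include filter matches an element that contains only an image or a comment. A simplified sketch of that decision, using BeautifulSoup purely for illustration, could be:

    from bs4 import BeautifulSoup

    class ReplyWithContentButNoText(Exception):
        """Raised when the filter matched something but no usable text was extracted (name taken from the test comment)."""

    def text_after_filter(html, css_filter):
        soup = BeautifulSoup(html, "html.parser")
        matches = soup.select(css_filter)
        text = "\n".join(m.get_text(strip=True) for m in matches)
        if matches and not text:
            # e.g. the selection contains only an <img> or an HTML comment
            raise ReplyWithContentButNoText(f"'{css_filter}' gave an empty result or contain only an image")
        return text
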
@@ -129,7 +129,7 @@ def test_element_removal_full(client, live_server):
 data={
 "subtractive_selectors": subtractive_selectors_data,
 "url": test_url,
-"tag": "",
+"tags": "",
 "headers": "",
 "fetch_backend": "html_requests",
 },

@@ -59,6 +59,8 @@ def test_http_error_handler(client, live_server):
 _runner_test_http_errors(client, live_server, 404, 'Page not found')
 _runner_test_http_errors(client, live_server, 500, '(Internal server Error) received')
 _runner_test_http_errors(client, live_server, 400, 'Error - Request returned a HTTP error code 400')
+res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+assert b'Deleted' in res.data

 # Just to be sure error text is properly handled
 def test_DNS_errors(client, live_server):
@@ -81,4 +83,48 @@ def test_DNS_errors(client, live_server):
 assert found_name_resolution_error
 # Should always record that we tried
 assert bytes("just now".encode('utf-8')) in res.data
+res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+assert b'Deleted' in res.data
+
+# Re 1513
+def test_low_level_errors_clear_correctly(client, live_server):
+#live_server_setup(live_server)
+# Give the endpoint time to spin up
+time.sleep(1)
+
+with open("test-datastore/endpoint-content.txt", "w") as f:
+f.write("<html><body><div id=here>Hello world</div></body></html>")
+
+# Add our URL to the import page
+test_url = url_for('test_endpoint', _external=True)
+
+res = client.post(
+url_for("import_page"),
+data={"urls": "https://dfkjasdkfjaidjfsdajfksdajfksdjfDOESNTEXIST.com"},
+follow_redirects=True
+)
+assert b"1 Imported" in res.data
+time.sleep(2)
+
+# We should see the DNS error
+res = client.get(url_for("index"))
+found_name_resolution_error = b"Temporary failure in name resolution" in res.data or b"Name or service not known" in res.data
+assert found_name_resolution_error
+
+# Update with what should work
+client.post(
+url_for("edit_page", uuid="first"),
+data={
+"url": test_url,
+"fetch_backend": "html_requests"},
+follow_redirects=True
+)
+
+# Now the error should be gone
+time.sleep(2)
+res = client.get(url_for("index"))
+found_name_resolution_error = b"Temporary failure in name resolution" in res.data or b"Name or service not known" in res.data
+assert not found_name_resolution_error
+
+res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+assert b'Deleted' in res.data

@@ -2,7 +2,7 @@

 import time
 from flask import url_for
-from .util import live_server_setup
+from .util import live_server_setup, wait_for_all_checks

 from ..html_tools import *

@@ -55,6 +55,8 @@ def set_multiline_response():
 </p>

 <div>aaand something lines</div>
+<br>
+<div>and this should be</div>
 </body>
 </html>
 """
@@ -66,11 +68,10 @@ def set_multiline_response():


 def test_setup(client, live_server):

 live_server_setup(live_server)

 def test_check_filter_multiline(client, live_server):
+#live_server_setup(live_server)
 set_multiline_response()

 # Add our URL to the import page
@@ -82,16 +83,17 @@ def test_check_filter_multiline(client, live_server):
 )
 assert b"1 Imported" in res.data

-time.sleep(3)
+wait_for_all_checks(client)

 # Goto the edit page, add our ignore text
 # Add our URL to the import page
 res = client.post(
 url_for("edit_page", uuid="first"),
 data={"include_filters": '',
-'extract_text': '/something.+?6 billion.+?lines/si',
+# Test a regex and a plaintext
+'extract_text': '/something.+?6 billion.+?lines/si\r\nand this should be',
 "url": test_url,
-"tag": "",
+"tags": "",
 "headers": "",
 'fetch_backend': "html_requests"
 },
@@ -99,13 +101,19 @@ def test_check_filter_multiline(client, live_server):
 )

 assert b"Updated watch." in res.data
-time.sleep(3)
+wait_for_all_checks(client)

+res = client.get(url_for("index"))
+
+# Issue 1828
+assert b'not at the start of the expression' not in res.data
+
 res = client.get(
 url_for("preview_page", uuid="first"),
 follow_redirects=True
 )
+# Plaintext that doesnt look like a regex should match also
+assert b'and this should be' in res.data

 assert b'<div class="">Something' in res.data
 assert b'<div class="">across 6 billion multiple' in res.data
@@ -115,14 +123,11 @@ def test_check_filter_multiline(client, live_server):
 assert b'aaand something lines' not in res.data

 def test_check_filter_and_regex_extract(client, live_server):
-sleep_time_for_fetch_thread = 3
 include_filters = ".changetext"

 set_original_response()

-# Give the endpoint time to spin up
-time.sleep(1)

 # Add our URL to the import page
 test_url = url_for('test_endpoint', _external=True)
 res = client.post(
@@ -132,21 +137,17 @@ def test_check_filter_and_regex_extract(client, live_server):
 )
 assert b"1 Imported" in res.data

-time.sleep(1)
-# Trigger a check
-client.get(url_for("form_watch_checknow"), follow_redirects=True)

 # Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

 # Goto the edit page, add our ignore text
 # Add our URL to the import page
 res = client.post(
 url_for("edit_page", uuid="first"),
 data={"include_filters": include_filters,
-'extract_text': '\d+ online\r\n\d+ guests\r\n/somecase insensitive \d+/i\r\n/somecase insensitive (345\d)/i',
+'extract_text': '/\d+ online/\r\n/\d+ guests/\r\n/somecase insensitive \d+/i\r\n/somecase insensitive (345\d)/i\r\n/issue1828.+?2022/i',
 "url": test_url,
-"tag": "",
+"tags": "",
 "headers": "",
 'fetch_backend': "html_requests"
 },
@@ -155,8 +156,13 @@ def test_check_filter_and_regex_extract(client, live_server):

 assert b"Updated watch." in res.data


 # Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

+res = client.get(url_for("index"))
+#issue 1828
+assert b'not at the start of the expression' not in res.data
+
 # Make a change
 set_modified_response()
@@ -164,7 +170,7 @@ def test_check_filter_and_regex_extract(client, live_server):
 # Trigger a check
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
 # Give the thread time to pick it up
-time.sleep(sleep_time_for_fetch_thread)
+wait_for_all_checks(client)

 # It should have 'unviewed' still
 # Because it should be looking at only that 'sametext' id

@@ -15,7 +15,7 @@ def set_response_without_filter():
 <p>Which is across multiple lines</p>
 <br>
 So let's see what happens. <br>
 <div id="nope-doesnt-exist">Some text thats the same</div>
 </body>
 </html>
 """
@@ -32,7 +32,7 @@ def set_response_with_filter():
 <p>Which is across multiple lines</p>
 <br>
 So let's see what happens. <br>
 <div class="ticket-available">Ticket now on sale!</div>
 </body>
 </html>
 """
@@ -56,7 +56,7 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se
 test_url = url_for('test_endpoint', _external=True)
 res = client.post(
 url_for("form_quick_watch_add"),
-data={"url": test_url, "tag": 'cinema'},
+data={"url": test_url, "tags": 'cinema'},
 follow_redirects=True
 )
 assert b"Watch added" in res.data
@@ -84,12 +84,13 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se
 "Snapshot: {{current_snapshot}}\n"
 "Diff: {{diff}}\n"
 "Diff Full: {{diff_full}}\n"
+"Diff as Patch: {{diff_patch}}\n"
 ":-)",
 "notification_format": "Text"}

 notification_form_data.update({
 "url": test_url,
-"tag": "my tag",
+"tags": "my tag",
 "title": "my title",
 "headers": "",
 "include_filters": '.ticket-available',

@@ -1,7 +1,7 @@
 import os
 import time
 from flask import url_for
-from .util import set_original_response, live_server_setup, extract_UUID_from_client
+from .util import set_original_response, live_server_setup, extract_UUID_from_client, wait_for_all_checks
 from changedetectionio.model import App


@@ -12,7 +12,7 @@ def set_response_with_filter():
 <p>Which is across multiple lines</p>
 <br>
 So let's see what happens. <br>
 <div id="nope-doesnt-exist">Some text thats the same</div>
 </body>
 </html>
 """
@@ -37,14 +37,14 @@ def run_filter_test(client, content_filter):
 test_url = url_for('test_endpoint', _external=True)
 res = client.post(
 url_for("form_quick_watch_add"),
-data={"url": test_url, "tag": ''},
+data={"url": test_url, "tags": ''},
 follow_redirects=True
 )

 assert b"Watch added" in res.data

 # Give the thread time to pick up the first version
-time.sleep(3)
+wait_for_all_checks(client)

 # Goto the edit page, add our ignore text
 # Add our URL to the import page
@@ -66,13 +66,14 @@ def run_filter_test(client, content_filter):
 "Snapshot: {{current_snapshot}}\n"
 "Diff: {{diff}}\n"
 "Diff Full: {{diff_full}}\n"
+"Diff as Patch: {{diff_patch}}\n"
 ":-)",
 "notification_format": "Text"}

 notification_form_data.update({
 "url": test_url,
-"tag": "my tag",
-"title": "my title",
+"tags": "my tag",
+"title": "my title 123",
 "headers": "",
 "filter_failure_notification_send": 'y',
 "include_filters": content_filter,
@@ -85,43 +86,55 @@ def run_filter_test(client, content_filter):
 )

 assert b"Updated watch." in res.data
-time.sleep(3)
+wait_for_all_checks(client)

 # Now the notification should not exist, because we didnt reach the threshold
 assert not os.path.isfile("test-datastore/notification.txt")

-for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT):
+# -2 because we would have checked twice above (on adding and on edit)
+for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT-2):
 res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(3)
+wait_for_all_checks(client)
+assert not os.path.isfile("test-datastore/notification.txt"), f"test-datastore/notification.txt should not exist - Attempt {i}"

 # We should see something in the frontend
 assert b'Warning, no filters were found' in res.data

+# One more check should trigger it (see -2 above)
+client.get(url_for("form_watch_checknow"), follow_redirects=True)
+wait_for_all_checks(client)
+client.get(url_for("form_watch_checknow"), follow_redirects=True)
+wait_for_all_checks(client)
 # Now it should exist and contain our "filter not found" alert
 assert os.path.isfile("test-datastore/notification.txt")
-notification = False
 with open("test-datastore/notification.txt", 'r') as f:
 notification = f.read()

 assert 'CSS/xPath filter was not present in the page' in notification
 assert content_filter.replace('"', '\\"') in notification

-# Remove it and prove that it doesnt trigger when not expected
+# Remove it and prove that it doesn't trigger when not expected
+# It should register a change, but no 'filter not found'
 os.unlink("test-datastore/notification.txt")
 set_response_with_filter()

+# Try several times, it should NOT have 'filter not found'
 for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT):
 client.get(url_for("form_watch_checknow"), follow_redirects=True)
-time.sleep(3)
+wait_for_all_checks(client)

 # It should have sent a notification, but..
 assert os.path.isfile("test-datastore/notification.txt")
-# but it should not contain the info about the failed filter
+# but it should not contain the info about a failed filter (because there was none in this case)
 with open("test-datastore/notification.txt", 'r') as f:
 notification = f.read()
 assert not 'CSS/xPath filter was not present in the page' in notification

-# Re #1247 - All tokens got replaced
+# Re #1247 - All tokens got replaced correctly in the notification
+res = client.get(url_for("index"))
 uuid = extract_UUID_from_client(client)
+# UUID is correct, but notification contains tag uuid as UUIID wtf
 assert uuid in notification

 # cleanup for the next
@@ -137,7 +150,7 @@ def test_setup(live_server):

 def test_check_include_filters_failure_notification(client, live_server):
 set_original_response()
-time.sleep(1)
+wait_for_all_checks(client)
 run_filter_test(client, '#nope-doesnt-exist')

 def test_check_xpath_filter_failure_notification(client, live_server):

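The run_filter_test() changes subtract 2 from App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT because the watch has already been checked twice (once when added, once when edited) before the loop starts, and the "filter not found" notification should only fire once the consecutive-failure count reaches the threshold. A toy model of that counter, not the project's real implementation:

    FILTER_FAILURE_THRESHOLD = 6  # stand-in for App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT

    class Watch:
        def __init__(self):
            self.consecutive_filter_failures = 0
            self.notified = False

        def record_check(self, filter_found):
            if filter_found:
                self.consecutive_filter_failures = 0
                return
            self.consecutive_filter_failures += 1
            if self.consecutive_filter_failures >= FILTER_FAILURE_THRESHOLD:
                self.notified = True  # the 'filter not found' notification would be sent here

    w = Watch()
    for _ in range(FILTER_FAILURE_THRESHOLD - 1):
        w.record_check(filter_found=False)
    assert not w.notified           # still under the threshold
    w.record_check(filter_found=False)
    assert w.notified               # one more failed check trips the notification
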
changedetectionio/tests/test_group.py (new file, 323 lines)
@@ -0,0 +1,323 @@
+#!/usr/bin/python3
+
+import time
+from flask import url_for
+from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, get_UUID_for_tag_name, extract_UUID_from_client
+import os
+
+
+def test_setup(client, live_server):
+live_server_setup(live_server)
+
+def set_original_response():
+test_return_data = """<html>
+<body>
+Some initial text<br>
+<p id="only-this">Should be only this</p>
+<br>
+<p id="not-this">And never this</p>
+</body>
+</html>
+"""
+
+with open("test-datastore/endpoint-content.txt", "w") as f:
+f.write(test_return_data)
+return None
+
+def set_modified_response():
+test_return_data = """<html>
+<body>
+Some initial text<br>
+<p id="only-this">Should be REALLY only this</p>
+<br>
+<p id="not-this">And never this</p>
+</body>
+</html>
+"""
+
+with open("test-datastore/endpoint-content.txt", "w") as f:
+f.write(test_return_data)
+return None
+
+def test_setup_group_tag(client, live_server):
+#live_server_setup(live_server)
+set_original_response()
+
+# Add a tag with some config, import a tag and it should roughly work
+res = client.post(
+url_for("tags.form_tag_add"),
+data={"name": "test-tag"},
+follow_redirects=True
+)
+assert b"Tag added" in res.data
+assert b"test-tag" in res.data
+
+res = client.post(
+url_for("tags.form_tag_edit_submit", uuid="first"),
+data={"name": "test-tag",
+"include_filters": '#only-this',
+"subtractive_selectors": '#not-this'},
+follow_redirects=True
+)
+assert b"Updated" in res.data
+tag_uuid = get_UUID_for_tag_name(client, name="test-tag")
+res = client.get(
+url_for("tags.form_tag_edit", uuid="first")
+)
+assert b"#only-this" in res.data
+assert b"#not-this" in res.data
+
+# Tag should be setup and ready, now add a watch
+
+test_url = url_for('test_endpoint', _external=True)
+res = client.post(
+url_for("import_page"),
+data={"urls": test_url + "?first-imported=1 test-tag, extra-import-tag"},
+follow_redirects=True
+)
+assert b"1 Imported" in res.data
+
+res = client.get(url_for("index"))
+assert b'import-tag' in res.data
+assert b'extra-import-tag' in res.data
+
+res = client.get(
+url_for("tags.tags_overview_page"),
+follow_redirects=True
+)
+assert b'import-tag' in res.data
+assert b'extra-import-tag' in res.data
+
+wait_for_all_checks(client)
+
+res = client.get(url_for("index"))
+assert b'Warning, no filters were found' not in res.data
+
+res = client.get(
+url_for("preview_page", uuid="first"),
+follow_redirects=True
+)
+assert b'Should be only this' in res.data
+assert b'And never this' not in res.data
+
+
+# RSS Group tag filter
+# An extra one that should be excluded
+res = client.post(
+url_for("import_page"),
+data={"urls": test_url + "?should-be-excluded=1 some-tag"},
+follow_redirects=True
+)
+assert b"1 Imported" in res.data
+wait_for_all_checks(client)
+set_modified_response()
+res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
+wait_for_all_checks(client)
+rss_token = extract_rss_token_from_UI(client)
+res = client.get(
+url_for("rss", token=rss_token, tag="extra-import-tag", _external=True),
+follow_redirects=True
+)
+assert b"should-be-excluded" not in res.data
+assert res.status_code == 200
+assert b"first-imported=1" in res.data
+res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+assert b'Deleted' in res.data
+
+def test_tag_import_singular(client, live_server):
+#live_server_setup(live_server)
+
+test_url = url_for('test_endpoint', _external=True)
+res = client.post(
+url_for("import_page"),
+data={"urls": test_url + " test-tag, test-tag\r\n"+ test_url + "?x=1 test-tag, test-tag\r\n"},
+follow_redirects=True
+)
+assert b"2 Imported" in res.data
+
+res = client.get(
+url_for("tags.tags_overview_page"),
+follow_redirects=True
+)
+# Should be only 1 tag because they both had the same
+assert res.data.count(b'test-tag') == 1
+res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+assert b'Deleted' in res.data
+
+def test_tag_add_in_ui(client, live_server):
+#live_server_setup(live_server)
+#
+res = client.post(
+url_for("tags.form_tag_add"),
+data={"name": "new-test-tag"},
+follow_redirects=True
+)
+assert b"Tag added" in res.data
+assert b"new-test-tag" in res.data
+
+res = client.get(url_for("tags.delete_all"), follow_redirects=True)
+assert b'All tags deleted' in res.data
+
+res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+assert b'Deleted' in res.data
+
+def test_group_tag_notification(client, live_server):
+#live_server_setup(live_server)
+set_original_response()
+
+test_url = url_for('test_endpoint', _external=True)
+res = client.post(
+url_for("form_quick_watch_add"),
+data={"url": test_url, "tags": 'test-tag, other-tag'},
+follow_redirects=True
+)
+
+assert b"Watch added" in res.data
+
+notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json')
+notification_form_data = {"notification_urls": notification_url,
+"notification_title": "New GROUP TAG ChangeDetection.io Notification - {{watch_url}}",
+"notification_body": "BASE URL: {{base_url}}\n"
+"Watch URL: {{watch_url}}\n"
+"Watch UUID: {{watch_uuid}}\n"
+"Watch title: {{watch_title}}\n"
+"Watch tag: {{watch_tag}}\n"
+"Preview: {{preview_url}}\n"
+"Diff URL: {{diff_url}}\n"
+"Snapshot: {{current_snapshot}}\n"
+"Diff: {{diff}}\n"
+"Diff Added: {{diff_added}}\n"
+"Diff Removed: {{diff_removed}}\n"
+"Diff Full: {{diff_full}}\n"
+"Diff as Patch: {{diff_patch}}\n"
+":-)",
+"notification_screenshot": True,
+"notification_format": "Text",
+"title": "test-tag"}
+
+res = client.post(
+url_for("tags.form_tag_edit_submit", uuid=get_UUID_for_tag_name(client, name="test-tag")),
+data=notification_form_data,
+follow_redirects=True
+)
+assert b"Updated" in res.data
+
+wait_for_all_checks(client)
+
+set_modified_response()
+client.get(url_for("form_watch_checknow"), follow_redirects=True)
+time.sleep(3)
+
+assert os.path.isfile("test-datastore/notification.txt")
+
+# Verify what was sent as a notification, this file should exist
+with open("test-datastore/notification.txt", "r") as f:
+notification_submission = f.read()
+os.unlink("test-datastore/notification.txt")
+
+# Did we see the URL that had a change, in the notification?
+# Diff was correctly executed
+assert test_url in notification_submission
+assert ':-)' in notification_submission
+assert "Diff Full: Some initial text" in notification_submission
+assert "New GROUP TAG ChangeDetection.io" in notification_submission
+assert "test-tag" in notification_submission
+assert "other-tag" in notification_submission
+
+#@todo Test that multiple notifications fired
+#@todo Test that each of multiple notifications with different settings
+res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+assert b'Deleted' in res.data
+
+def test_limit_tag_ui(client, live_server):
+#live_server_setup(live_server)
+
+test_url = url_for('test_endpoint', _external=True)
+urls=[]
+
+for i in range(20):
+urls.append(test_url+"?x="+str(i)+" test-tag")
+
+for i in range(20):
+urls.append(test_url+"?non-grouped="+str(i))
+
+res = client.post(
+url_for("import_page"),
+data={"urls": "\r\n".join(urls)},
+follow_redirects=True
+)
+
+assert b"40 Imported" in res.data
+
+res = client.get(url_for("index"))
+assert b'test-tag' in res.data
+
+# All should be here
+assert res.data.count(b'processor-text_json_diff') == 40
+
+tag_uuid = get_UUID_for_tag_name(client, name="test-tag")
+
+res = client.get(url_for("index", tag=tag_uuid))
+
+# Just a subset should be here
+assert b'test-tag' in res.data
+assert res.data.count(b'processor-text_json_diff') == 20
+assert b"object at" not in res.data
+res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+assert b'Deleted' in res.data
+res = client.get(url_for("tags.delete_all"), follow_redirects=True)
+assert b'All tags deleted' in res.data
+
+def test_clone_tag_on_import(client, live_server):
+#live_server_setup(live_server)
+test_url = url_for('test_endpoint', _external=True)
+res = client.post(
+url_for("import_page"),
+data={"urls": test_url + " test-tag, another-tag\r\n"},
+follow_redirects=True
+)
+
+assert b"1 Imported" in res.data
+
+res = client.get(url_for("index"))
+assert b'test-tag' in res.data
+assert b'another-tag' in res.data
+
+watch_uuid = extract_UUID_from_client(client)
+res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True)
+
+assert b'Cloned' in res.data
+# 2 times plus the top link to tag
+assert res.data.count(b'test-tag') == 3
+assert res.data.count(b'another-tag') == 3
+res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+assert b'Deleted' in res.data
+
+def test_clone_tag_on_quickwatchform_add(client, live_server):
+#live_server_setup(live_server)
+
+test_url = url_for('test_endpoint', _external=True)
+
+res = client.post(
+url_for("form_quick_watch_add"),
+data={"url": test_url, "tags": ' test-tag, another-tag '},
+follow_redirects=True
+)
+
+assert b"Watch added" in res.data
+
+res = client.get(url_for("index"))
+assert b'test-tag' in res.data
+assert b'another-tag' in res.data
+
+watch_uuid = extract_UUID_from_client(client)
+res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True)
+
+assert b'Cloned' in res.data
+# 2 times plus the top link to tag
+assert res.data.count(b'test-tag') == 3
+assert res.data.count(b'another-tag') == 3
+res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+assert b'Deleted' in res.data
+
+res = client.get(url_for("tags.delete_all"), follow_redirects=True)
+assert b'All tags deleted' in res.data

@@ -15,11 +15,24 @@ def test_strip_regex_text_func():
 but sometimes we want to remove the lines.

 but 1 lines
+skip 5 lines
+really? yes man
+#/not this tries weirdly formed regex or just strings starting with /
+/not this
 but including 1234 lines
 igNORe-cAse text we dont want to keep
 but not always."""

-ignore_lines = ["sometimes", "/\s\d{2,3}\s/", "/ignore-case text/"]
+ignore_lines = [
+"sometimes",
+"/\s\d{2,3}\s/",
+"/ignore-case text/",
+"really?",
+"/skip \d lines/i",
+"/not"
+]
+

 fetcher = fetch_site_status.perform_site_check(datastore=False)
 stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines)
@@ -27,4 +40,10 @@ def test_strip_regex_text_func():
 assert b"but 1 lines" in stripped_content
 assert b"igNORe-cAse text" not in stripped_content
 assert b"but 1234 lines" not in stripped_content
+assert b"really" not in stripped_content
+assert b"not this" not in stripped_content
+
+# Check line number reporting
+stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines, mode="line numbers")
+assert stripped_content == [2, 5, 6, 7, 8, 10]

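The expanded test_strip_regex_text_func mixes plain substrings with /regex/ and /regex/i entries and adds a "line numbers" reporting mode. A small sketch of how such an ignore list could be interpreted (assumed behaviour, not the exact html_tools.strip_ignore_text code):

    import re

    def strip_ignore_text(content, ignore_lines, mode=None):
        kept, ignored_line_numbers = [], []
        for number, line in enumerate(content.splitlines(), start=1):
            hit = False
            for rule in ignore_lines:
                # "/pattern/" or "/pattern/i" is treated as a regex, anything else as a plain substring
                if len(rule) > 2 and rule.startswith('/') and rule.rstrip('i').endswith('/'):
                    flags = re.IGNORECASE if rule.endswith('i') else 0
                    pattern = rule.rstrip('i').strip('/')
                    if re.search(pattern, line, flags):
                        hit = True
                        break
                elif rule.lower() in line.lower():
                    hit = True
                    break
            if hit:
                ignored_line_numbers.append(number)
            else:
                kept.append(line)
        return ignored_line_numbers if mode == "line numbers" else "\n".join(kept)
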
@@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
import time
|
import time
|
||||||
from flask import url_for
|
from flask import url_for
|
||||||
from . util import live_server_setup
|
from .util import live_server_setup, wait_for_all_checks
|
||||||
from changedetectionio import html_tools
|
from changedetectionio import html_tools
|
||||||
|
|
||||||
def test_setup(live_server):
|
def test_setup(live_server):
|
||||||
@@ -84,7 +84,6 @@ def set_modified_ignore_response():
|
|||||||
|
|
||||||
|
|
||||||
def test_check_ignore_text_functionality(client, live_server):
|
def test_check_ignore_text_functionality(client, live_server):
|
||||||
sleep_time_for_fetch_thread = 3
|
|
||||||
|
|
||||||
# Use a mix of case in ZzZ to prove it works case-insensitive.
|
# Use a mix of case in ZzZ to prove it works case-insensitive.
|
||||||
ignore_text = "XXXXX\r\nYYYYY\r\nzZzZZ\r\nnew ignore stuff"
|
ignore_text = "XXXXX\r\nYYYYY\r\nzZzZZ\r\nnew ignore stuff"
|
||||||
@@ -103,7 +102,7 @@ def test_check_ignore_text_functionality(client, live_server):
|
|||||||
assert b"1 Imported" in res.data
|
assert b"1 Imported" in res.data
|
||||||
|
|
||||||
# Give the thread time to pick it up
|
# Give the thread time to pick it up
|
||||||
time.sleep(sleep_time_for_fetch_thread)
|
wait_for_all_checks(client)
|
||||||
|
|
||||||
# Goto the edit page, add our ignore text
|
# Goto the edit page, add our ignore text
|
||||||
     # Add our URL to the import page

@@ -124,7 +123,7 @@ def test_check_ignore_text_functionality(client, live_server):
     client.get(url_for("form_watch_checknow"), follow_redirects=True)

     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # It should report nothing found (no new 'unviewed' class)
     res = client.get(url_for("index"))
@@ -137,7 +136,7 @@ def test_check_ignore_text_functionality(client, live_server):
     # Trigger a check
     client.get(url_for("form_watch_checknow"), follow_redirects=True)
     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # It should report nothing found (no new 'unviewed' class)
     res = client.get(url_for("index"))
@@ -151,7 +150,7 @@ def test_check_ignore_text_functionality(client, live_server):
     # Just to be sure.. set a regular modified change..
     set_modified_original_ignore_response()
     client.get(url_for("form_watch_checknow"), follow_redirects=True)
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     res = client.get(url_for("index"))
     assert b'unviewed' in res.data
@@ -167,7 +166,6 @@ def test_check_ignore_text_functionality(client, live_server):
     assert b'Deleted' in res.data

 def test_check_global_ignore_text_functionality(client, live_server):
-    sleep_time_for_fetch_thread = 3

     # Give the endpoint time to spin up
     time.sleep(1)
@@ -198,7 +196,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
     assert b"1 Imported" in res.data

     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)


     # Goto the edit page of the item, add our ignore text
@@ -220,7 +218,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
     client.get(url_for("form_watch_checknow"), follow_redirects=True)

     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # so that we are sure everything is viewed and in a known 'nothing changed' state
     res = client.get(url_for("diff_history_page", uuid="first"))
@@ -237,7 +235,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
     # Trigger a check
     client.get(url_for("form_watch_checknow"), follow_redirects=True)
     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # It should report nothing found (no new 'unviewed' class)
     res = client.get(url_for("index"))
@@ -247,7 +245,7 @@ def test_check_global_ignore_text_functionality(client, live_server):
     # Just to be sure.. set a regular modified change that will trigger it
     set_modified_original_ignore_response()
     client.get(url_for("form_watch_checknow"), follow_redirects=True)
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)
     res = client.get(url_for("index"))
     assert b'unviewed' in res.data

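The change repeated throughout these hunks swaps the fixed time.sleep(sleep_time_for_fetch_thread) pauses for wait_for_all_checks(client) from the tests' util module, so each test waits only until the fetch queue is actually idle instead of a hard-coded number of seconds. A minimal sketch of what such a polling helper can look like, assuming the index page shows a "Checking now" state while a watch is queued (the real helper in changedetectionio/tests/util.py may differ):

import time
from flask import url_for

def wait_for_all_checks(client, timeout=60):
    # Hypothetical sketch: poll the index page and return once no watch
    # reports "Checking now"; give up after `timeout` seconds.
    for _ in range(timeout):
        res = client.get(url_for("index"))
        if b'Checking now' not in res.data:
            return
        time.sleep(1)
    raise AssertionError("watches were still being checked after %s seconds" % timeout)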
changedetectionio/tests/test_ignorehighlighter.py (new file, 57 lines)
@@ -0,0 +1,57 @@
+#!/usr/bin/python3
+
+import time
+from flask import url_for
+from .util import live_server_setup, wait_for_all_checks
+from changedetectionio import html_tools
+from . util import extract_UUID_from_client
+
+def set_original_ignore_response():
+    test_return_data = """<html>
+     <body>
+     Some initial text<br>
+     <p>Which is across multiple lines</p>
+     <br>
+     So let's see what happens. <br>
+     <p>oh yeah 456</p>
+     </body>
+     </html>
+
+    """
+
+    with open("test-datastore/endpoint-content.txt", "w") as f:
+        f.write(test_return_data)
+
+
+def test_highlight_ignore(client, live_server):
+    live_server_setup(live_server)
+    set_original_ignore_response()
+    test_url = url_for('test_endpoint', _external=True)
+    res = client.post(
+        url_for("import_page"),
+        data={"urls": test_url},
+        follow_redirects=True
+    )
+    assert b"1 Imported" in res.data
+
+    # Give the thread time to pick it up
+    wait_for_all_checks(client)
+    uuid = extract_UUID_from_client(client)
+    # use the highlighter endpoint
+    res = client.post(
+        url_for("highlight_submit_ignore_url", uuid=uuid),
+        data={"mode": 'digit-regex', 'selection': 'oh yeah 123'},
+        follow_redirects=True
+    )
+
+    res = client.get(url_for("edit_page", uuid=uuid))
+
+    # should be a regex now
+    assert b'/oh\ yeah\ \d+/' in res.data
+
+    # Should return a link
+    assert b'href' in res.data
+
+    # And it should register in the preview page
+    res = client.get(url_for("preview_page", uuid=uuid))
+    assert b'<div class="ignored">oh yeah 456' in res.data
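The new test drives the highlighter endpoint in its 'digit-regex' mode: submitting the selection 'oh yeah 123' is expected to land on the edit page as the ignore rule /oh\ yeah\ \d+/, i.e. the literal text escaped with every run of digits generalised, which is why the unrelated 'oh yeah 456' in the page body then shows as ignored in the preview. A rough sketch of that conversion under those assumptions (the helper name is hypothetical; the real logic sits behind the highlight_submit_ignore_url route and may differ):

import re

def selection_to_digit_regex(selection):
    # Hypothetical sketch: escape spaces and regex metacharacters, then
    # replace each run of digits with \d+ and wrap the result in /.../
    escaped = re.sub(r'(\W)', r'\\\1', selection.strip())
    return "/" + re.sub(r'\d+', r'\\d+', escaped) + "/"

print(selection_to_digit_regex("oh yeah 123"))  # -> /oh\ yeah\ \d+/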
@@ -2,7 +2,7 @@

 import time
 from flask import url_for
-from . util import live_server_setup
+from .util import live_server_setup, wait_for_all_checks


 def test_setup(live_server):
@@ -40,7 +40,7 @@ def set_some_changed_response():


 def test_normal_page_check_works_with_ignore_status_code(client, live_server):
-    sleep_time_for_fetch_thread = 3

     # Give the endpoint time to spin up
     time.sleep(1)
@@ -68,15 +68,15 @@ def test_normal_page_check_works_with_ignore_status_code(client, live_server):
     )
     assert b"1 Imported" in res.data

-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     set_some_changed_response()
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)
     # Trigger a check
     client.get(url_for("form_watch_checknow"), follow_redirects=True)

     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # It should report nothing found (no new 'unviewed' class)
     res = client.get(url_for("index"))
@@ -109,13 +109,13 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server):
     # Add our URL to the import page
     res = client.post(
         url_for("edit_page", uuid="first"),
-        data={"ignore_status_codes": "y", "url": test_url, "tag": "", "headers": "", 'fetch_backend': "html_requests"},
+        data={"ignore_status_codes": "y", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
         follow_redirects=True
     )
     assert b"Updated watch." in res.data

     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # Make a change
     set_some_changed_response()
@@ -123,7 +123,7 @@ def test_403_page_check_works_with_ignore_status_code(client, live_server):
     # Trigger a check
     client.get(url_for("form_watch_checknow"), follow_redirects=True)
     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # It should have 'unviewed' still
     # Because it should be looking at only that 'sametext' id

@@ -112,6 +112,7 @@ def test_import_distillio(client, live_server):
     # did the tags work?
     res = client.get( url_for("index"))

+    # check tags
     assert b"nice stuff" in res.data
     assert b"nerd-news" in res.data

@@ -20,7 +20,7 @@ def test_jinja2_in_url_query(client, live_server):
                            "date={% now 'Europe/Berlin', '%Y' %}.{% now 'Europe/Berlin', '%m' %}.{% now 'Europe/Berlin', '%d' %}", )
     res = client.post(
         url_for("form_quick_watch_add"),
-        data={"url": full_url, "tag": "test"},
+        data={"url": full_url, "tags": "test"},
         follow_redirects=True
     )
     assert b"Watch added" in res.data

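This hunk touches a test where the watch URL itself carries Jinja2 {% now %} tags that are rendered at fetch time into the current date. A small sketch of how such a URL template renders, assuming the jinja2-time extension (jinja2_time.TimeExtension) provides the {% now %} tag used here; changedetection.io's own rendering setup may differ:

from jinja2 import Environment

# Render a URL template containing {% now %} tags (assumes the jinja2-time package).
env = Environment(extensions=['jinja2_time.TimeExtension'])
template = "https://example.com/?date={% now 'Europe/Berlin', '%Y' %}.{% now 'Europe/Berlin', '%m' %}.{% now 'Europe/Berlin', '%d' %}"
print(env.from_string(template).render())  # e.g. https://example.com/?date=2023.10.05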
@@ -3,7 +3,7 @@

 import time
 from flask import url_for, escape
-from . util import live_server_setup
+from . util import live_server_setup, wait_for_all_checks
 import pytest
 jq_support = True

@@ -64,6 +64,24 @@ and it can also be repeated
     with pytest.raises(html_tools.JSONNotFound) as e_info:
         html_tools.extract_json_as_string('COMPLETE GIBBERISH, NO JSON!', "jq:.id")

+
+def test_unittest_inline_extract_body():
+    content = """
+    <html>
+    <head></head>
+    <body>
+    <pre style="word-wrap: break-word; white-space: pre-wrap;">
+    {"testKey": 42}
+    </pre>
+    </body>
+    </html>
+    """
+    from .. import html_tools
+
+    # See that we can find the second <script> one, which is not broken, and matches our filter
+    text = html_tools.extract_json_as_string(content, "json:$.testKey")
+    assert text == '42'
+
 def set_original_ext_response():
     data = """
     [
@@ -190,7 +208,7 @@ def test_check_json_without_filter(client, live_server):
     )

     # Give the thread time to pick it up
-    time.sleep(3)
+    wait_for_all_checks(client)

     res = client.get(
         url_for("preview_page", uuid="first"),
@@ -220,7 +238,7 @@ def check_json_filter(json_filter, client, live_server):
     assert b"1 Imported" in res.data

     # Give the thread time to pick it up
-    time.sleep(3)
+    wait_for_all_checks(client)

     # Goto the edit page, add our ignore text
     # Add our URL to the import page
@@ -228,7 +246,7 @@ def check_json_filter(json_filter, client, live_server):
         url_for("edit_page", uuid="first"),
         data={"include_filters": json_filter,
               "url": test_url,
-              "tag": "",
+              "tags": "",
               "headers": "",
               "fetch_backend": "html_requests"
               },
@@ -243,14 +261,14 @@ def check_json_filter(json_filter, client, live_server):
     assert bytes(escape(json_filter).encode('utf-8')) in res.data

     # Give the thread time to pick it up
-    time.sleep(3)
+    wait_for_all_checks(client)
     # Make a change
     set_modified_response()

     # Trigger a check
     client.get(url_for("form_watch_checknow"), follow_redirects=True)
     # Give the thread time to pick it up
-    time.sleep(4)
+    wait_for_all_checks(client)

     # It should have 'unviewed' still
     res = client.get(url_for("index"))
@@ -288,14 +306,14 @@ def check_json_filter_bool_val(json_filter, client, live_server):
     )
     assert b"1 Imported" in res.data

-    time.sleep(3)
+    wait_for_all_checks(client)
     # Goto the edit page, add our ignore text
     # Add our URL to the import page
     res = client.post(
         url_for("edit_page", uuid="first"),
         data={"include_filters": json_filter,
               "url": test_url,
-              "tag": "",
+              "tags": "",
               "headers": "",
               "fetch_backend": "html_requests"
               },
@@ -304,14 +322,14 @@ def check_json_filter_bool_val(json_filter, client, live_server):
     assert b"Updated watch." in res.data

     # Give the thread time to pick it up
-    time.sleep(3)
+    wait_for_all_checks(client)
     # Make a change
     set_modified_response()

     # Trigger a check
     client.get(url_for("form_watch_checknow"), follow_redirects=True)
     # Give the thread time to pick it up
-    time.sleep(3)
+    wait_for_all_checks(client)

     res = client.get(url_for("diff_history_page", uuid="first"))
     # But the change should be there, tho its hard to test the change was detected because it will show old and new versions
@@ -348,7 +366,7 @@ def check_json_ext_filter(json_filter, client, live_server):
     assert b"1 Imported" in res.data

     # Give the thread time to pick it up
-    time.sleep(3)
+    wait_for_all_checks(client)

     # Goto the edit page, add our ignore text
     # Add our URL to the import page
@@ -356,7 +374,7 @@ def check_json_ext_filter(json_filter, client, live_server):
         url_for("edit_page", uuid="first"),
         data={"include_filters": json_filter,
               "url": test_url,
-              "tag": "",
+              "tags": "",
               "headers": "",
               "fetch_backend": "html_requests"
               },
@@ -371,14 +389,14 @@ def check_json_ext_filter(json_filter, client, live_server):
     assert bytes(escape(json_filter).encode('utf-8')) in res.data

     # Give the thread time to pick it up
-    time.sleep(3)
+    wait_for_all_checks(client)
     # Make a change
     set_modified_ext_response()

     # Trigger a check
     client.get(url_for("form_watch_checknow"), follow_redirects=True)
     # Give the thread time to pick it up
-    time.sleep(4)
+    wait_for_all_checks(client)

     # It should have 'unviewed'
     res = client.get(url_for("index"))
@@ -410,14 +428,14 @@ def test_ignore_json_order(client, live_server):
     )
     assert b"1 Imported" in res.data

-    time.sleep(2)
+    wait_for_all_checks(client)

     with open("test-datastore/endpoint-content.txt", "w") as f:
         f.write('{"world" : 123, "hello": 123}')

     # Trigger a check
     client.get(url_for("form_watch_checknow"), follow_redirects=True)
-    time.sleep(2)
+    wait_for_all_checks(client)

     res = client.get(url_for("index"))
     assert b'unviewed' not in res.data
@@ -428,7 +446,7 @@ def test_ignore_json_order(client, live_server):

     # Trigger a check
     client.get(url_for("form_watch_checknow"), follow_redirects=True)
-    time.sleep(2)
+    wait_for_all_checks(client)

     res = client.get(url_for("index"))
     assert b'unviewed' in res.data
@@ -436,6 +454,37 @@ def test_ignore_json_order(client, live_server):
     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
     assert b'Deleted' in res.data

+def test_correct_header_detect(client, live_server):
+    # Like in https://github.com/dgtlmoon/changedetection.io/pull/1593
+    # Specify extra html that JSON is sometimes wrapped in - when using Browserless/Puppeteer etc
+    with open("test-datastore/endpoint-content.txt", "w") as f:
+        f.write('<html><body>{"hello" : 123, "world": 123}')
+
+    # Add our URL to the import page
+    # Check weird casing is cleaned up and detected also
+    test_url = url_for('test_endpoint', content_type="aPPlication/JSon", uppercase_headers=True, _external=True)
+    res = client.post(
+        url_for("import_page"),
+        data={"urls": test_url},
+        follow_redirects=True
+    )
+    assert b"1 Imported" in res.data
+    wait_for_all_checks(client)
+    res = client.get(url_for("index"))
+
+    # Fixed in #1593
+    assert b'No parsable JSON found in this document' not in res.data
+
+    res = client.get(
+        url_for("preview_page", uuid="first"),
+        follow_redirects=True
+    )
+    assert b'"world":' in res.data
+    assert res.data.count(b'{') >= 2
+
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
 def test_check_jsonpath_ext_filter(client, live_server):
     check_json_ext_filter('json:$[?(@.status==Sold)]', client, live_server)

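The added test_correct_header_detect covers JSON that arrives wrapped in stray HTML (as happens when the page is fetched through Browserless/Puppeteer) and served with oddly-cased Content-Type headers. The essence is that the JSON processor has to strip any markup wrapper before parsing rather than trusting the header alone; a minimal sketch of that idea, assuming BeautifulSoup is available (the real extraction in changedetectionio/html_tools.py may differ):

import json
from bs4 import BeautifulSoup

def extract_wrapped_json(document):
    # Hypothetical sketch: drop any <html>/<body>/<pre> wrapper a browser
    # fetcher added around a raw JSON response, then parse what is left.
    stripped = BeautifulSoup(document, "html.parser").get_text().strip()
    return json.loads(stripped)

print(extract_wrapped_json('<html><body>{"hello" : 123, "world": 123}'))  # {'hello': 123, 'world': 123}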
Some files were not shown because too many files have changed in this diff.