Mirror of https://github.com/dgtlmoon/changedetection.io.git, synced 2025-10-30 14:17:40 +00:00

Compare commits: fix-legacy...0.48.04 (143 commits)
Commits included (SHA1):

5dea5e1def 0fade0a473 121e9c20e0 12cec2d541 d52e6e8e11 bae1a89b75
e49711f449 a3a3ab0622 c5fe188b28 1fb0adde54 2614b275f0 1631a55830
f00b8e4efb 179ca171d4 84f2870d4f 7421e0f95e c6162e48f1 feccb18cdc
1462ad89ac cfb9fadec8 d9f9fa735d 6084b0f23d 4e18aea5ff fdba6b5566
4e6c783c45 0f0f5af7b5 7fcba26bea 4bda1a234f d297850539 751239250f
6aceeb01ab 49bc982c69 e0abf0b505 f08a1185aa ad5d7efbbf 7029d10f8b
26d3a23e05 942625e1fb 33c83230a6 87510becb5 5e95dc62a5 7d94535dbf
563c196396 e8b82c47ca e84de7e8f4 1543edca24 82e0b99b07 b0ff9d161e
c1dd681643 ecafa27833 f7d4e58613 5bb47e47db 03151da68e a16a70229d
9476c1076b a4959b5971 a278fa22f2 d39530b261 d4b4355ff5 c1c8de3104
5a768d7db3 f38429ec93 783926962d 6cd1d50a4f 54a4970a4c fd00453e6d
2842ffb205 ec4e2f5649 fe8e3d1cb1 69fbafbdb7 f255165571 7ff34baa90
043378d09c af4bafcff8 b656338c63 97af190910 e9e063e18e 45c444d0db
00458b95c4 dad9760832 e2c2a76cb2 5b34aece96 1b625dc18a 367afc81e9
ddfbef6db3 e173954cdd e830fb2320 c6589ee1b4 dc936a2e8a 8c1527c1ad
a5ff1cd1d7 543cb205d2 273adfa0a4 8ecfd17973 19f3851c9d 7f2fa20318
e16814e40b 337fcab3f1 eaccd6026c 5b70625eaa 60d292107d 1cb38347da
55fe2abf42 4225900ec3 1fb4342488 7071df061a 6dd1fa2b88 371f85d544
932cf15e1e bf0d410d32 730f37c7ba 8a35d62e02 f527744024 71c9b1273c
ec68450df1 2fd762a783 d7e85ffe8f d23a301826 3ce6096fdb 8acdcdd861
755cba33de 8aae7dfae0 ed00f67a80 44e7e142f8 fe704e05a3 e756e0af5e
c0b6c8581e de558f208f 321426dea2 bde27c8a8f 1405e962f0 a9f10946f4
6f2186b442 cf0ff26275 cffb6d748c 99b0935b42 f1853b0ce7 c331612a22
445bb0dde3 8f3a6a42bc 732ae1d935 5437144dff ed38012c6e
@@ -1,18 +1,31 @@
.git
.github
changedetectionio/processors/__pycache__
changedetectionio/api/__pycache__
changedetectionio/model/__pycache__
changedetectionio/blueprint/price_data_follower/__pycache__
changedetectionio/blueprint/tags/__pycache__
changedetectionio/blueprint/__pycache__
changedetectionio/blueprint/browser_steps/__pycache__
changedetectionio/fetchers/__pycache__
changedetectionio/tests/visualselector/__pycache__
changedetectionio/tests/restock/__pycache__
changedetectionio/tests/__pycache__
changedetectionio/tests/fetchers/__pycache__
changedetectionio/tests/unit/__pycache__
changedetectionio/tests/proxy_list/__pycache__
changedetectionio/__pycache__
# Git
.git/
.gitignore

# GitHub
.github/

# Byte-compiled / optimized / DLL files
**/__pycache__
**/*.py[cod]

# Caches
.mypy_cache/
.pytest_cache/
.ruff_cache/

# Distribution / packaging
build/
dist/
*.egg-info*

# Virtual environment
.env
.venv/
venv/

# IntelliJ IDEA
.idea/

# Visual Studio
.vscode/
4  .github/ISSUE_TEMPLATE/bug_report.md (vendored)

@@ -27,6 +27,10 @@ A clear and concise description of what the bug is.
**Version**
*Exact version* in the top right area: 0....

**How did you install?**

Docker, Pip, from source directly etc

**To Reproduce**

Steps to reproduce the behavior:
4  .github/workflows/containers.yml (vendored)

@@ -95,7 +95,7 @@ jobs:
push: true
tags: |
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:dev,ghcr.io/${{ github.repository }}:dev
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8
cache-from: type=gha
cache-to: type=gha,mode=max

@@ -116,7 +116,7 @@ jobs:
ghcr.io/dgtlmoon/changedetection.io:${{ github.event.release.tag_name }}
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest
ghcr.io/dgtlmoon/changedetection.io:latest
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8
cache-from: type=gha
cache-to: type=gha,mode=max
# Looks like this was disabled
2  .github/workflows/test-container-build.yml (vendored)

@@ -64,7 +64,7 @@ jobs:
with:
context: ./
file: ./Dockerfile
platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
7  .github/workflows/test-only.yml (vendored)

@@ -37,3 +37,10 @@ jobs:
python-version: '3.12'
skip-pypuppeteer: true

test-application-3-13:
needs: lint-code
uses: ./.github/workflows/test-stack-reusable-workflow.yml
with:
python-version: '3.13'
skip-pypuppeteer: true

@@ -93,7 +93,7 @@ jobs:
- name: Playwright and SocketPuppetBrowser - Headers and requests
run: |
# Settings headers playwright tests - Call back in from Sockpuppetbrowser, check headers
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'find .; cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py; pwd;find .'

- name: Playwright and SocketPuppetBrowser - Restock detection
run: |

@@ -231,9 +231,9 @@ jobs:
docker logs test-cdio-basic-tests > output-logs/test-cdio-basic-tests-stdout-${{ env.PYTHON_VERSION }}.txt
docker logs test-cdio-basic-tests 2> output-logs/test-cdio-basic-tests-stderr-${{ env.PYTHON_VERSION }}.txt

- name: Store container log
- name: Store everything including test-datastore
if: always()
uses: actions/upload-artifact@v4
with:
name: test-cdio-basic-tests-output-py${{ env.PYTHON_VERSION }}
path: output-logs
path: .
39  .gitignore (vendored)

@@ -1,14 +1,29 @@
__pycache__
.idea
*.pyc
datastore/url-watches.json
datastore/*
__pycache__
.pytest_cache
build
dist
venv
test-datastore/*
test-datastore
# Byte-compiled / optimized / DLL files
**/__pycache__
**/*.py[cod]

# Caches
.mypy_cache/
.pytest_cache/
.ruff_cache/

# Distribution / packaging
build/
dist/
*.egg-info*

# Virtual environment
.env
.venv/
venv/

# IDEs
.idea
.vscode/settings.json

# Datastore files
datastore/
test-datastore/

# Memory consumption log
test-memory.log
54  COMMERCIAL_LICENCE.md (new file)

@@ -0,0 +1,54 @@
# Generally

In any commercial activity involving 'Hosting' (as defined herein), whether in part or in full, this license must be executed and adhered to.

# Commercial License Agreement

This Commercial License Agreement ("Agreement") is entered into by and between Web Technologies s.r.o. here-in ("Licensor") and (your company or personal name) _____________ ("Licensee"). This Agreement sets forth the terms and conditions under which Licensor provides its software ("Software") and services to Licensee for the purpose of reselling the software either in part or full, as part of any commercial activity where the activity involves a third party.

### Definition of Hosting

For the purposes of this Agreement, "hosting" means making the functionality of the Program or modified version available to third parties as a service. This includes, without limitation:
- Enabling third parties to interact with the functionality of the Program or modified version remotely through a computer network.
- Offering a service the value of which entirely or primarily derives from the value of the Program or modified version.
- Offering a service that accomplishes for users the primary purpose of the Program or modified version.

## 1. Grant of License
Subject to the terms and conditions of this Agreement, Licensor grants Licensee a non-exclusive, non-transferable license to install, use, and resell the Software. Licensee may:
- Resell the Software as part of a service offering or as a standalone product.
- Host the Software on a server and provide it as a hosted service (e.g., Software as a Service - SaaS).
- Integrate the Software into a larger product or service that is then sold or provided for commercial purposes, where the software is used either in part or full.

## 2. License Fees
Licensee agrees to pay Licensor the license fees specified in the ordering document. License fees are due and payable as specified in the ordering document. The fees may include initial licensing costs and recurring fees based on the number of end users, instances of the Software resold, or revenue generated from the resale activities.

## 3. Resale Conditions
Licensee must comply with the following conditions when reselling the Software, whether the software is resold in part or full:
- Provide end users with access to the source code under the same open-source license conditions as provided by Licensor.
- Clearly state in all marketing and sales materials that the Software is provided under a commercial license from Licensor, and provide a link back to https://changedetection.io.
- Ensure end users are aware of and agree to the terms of the commercial license prior to resale.
- Do not sublicense or transfer the Software to third parties except as part of an authorized resale activity.

## 4. Hosting and Provision of Services
Licensee may host the Software (either in part or full) on its servers and provide it as a hosted service to end users. The following conditions apply:
- Licensee must ensure that all hosted versions of the Software comply with the terms of this Agreement.
- Licensee must provide Licensor with regular reports detailing the number of end users and instances of the hosted service.
- Any modifications to the Software made by Licensee for hosting purposes must be made available to end users under the same open-source license conditions, unless agreed otherwise.

## 5. Services
Licensor will provide support and maintenance services as described in the support policy referenced in the ordering document should such an agreement be signed by all parties. Additional fees may apply for support services provided to end users resold by Licensee.

## 6. Reporting and Audits
Licensee agrees to provide Licensor with regular reports detailing the number of instances, end users, and revenue generated from the resale of the Software. Licensor reserves the right to audit Licensee’s records to ensure compliance with this Agreement.

## 7. Term and Termination
This Agreement shall commence on the effective date and continue for the period set forth in the ordering document unless terminated earlier in accordance with this Agreement. Either party may terminate this Agreement if the other party breaches any material term and fails to cure such breach within thirty (30) days after receipt of written notice.

## 8. Limitation of Liability and Disclaimer of Warranty
Executing this commercial license does not waive the Limitation of Liability or Disclaimer of Warranty as stated in the open-source LICENSE provided with the Software. The Software is provided "as is," without warranty of any kind, express or implied, including but not limited to the warranties of merchantability, fitness for a particular purpose, and noninfringement. In no event shall the authors or copyright holders be liable for any claim, damages, or other liability, whether in an action of contract, tort, or otherwise, arising from, out of, or in connection with the Software or the use or other dealings in the Software.

## 9. Governing Law
This Agreement shall be governed by and construed in accordance with the laws of the Czech Republic.

## Contact Information
For commercial licensing inquiries, please contact contact@changedetection.io and dgtlmoon@gmail.com.
@@ -32,14 +32,17 @@ RUN pip install --extra-index-url https://www.piwheels.org/simple --target=/dep
# Playwright is an alternative to Selenium
# Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing
# https://github.com/dgtlmoon/changedetection.io/pull/1067 also musl/alpine (not supported)
RUN pip install --target=/dependencies playwright~=1.41.2 \
RUN pip install --target=/dependencies playwright~=1.48.0 \
    || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."

# Final image stage
FROM python:${PYTHON_VERSION}-slim-bookworm
LABEL org.opencontainers.image.source="https://github.com/dgtlmoon/changedetection.io"

RUN apt-get update && apt-get install -y --no-install-recommends \
    libxslt1.1 \
    # For presenting price amounts correctly in the restock/price detection overview
    locales \
    # For pdftohtml
    poppler-utils \
    zlib1g \
@@ -1,4 +1,5 @@
recursive-include changedetectionio/api *
recursive-include changedetectionio/apprise_plugin *
recursive-include changedetectionio/blueprint *
recursive-include changedetectionio/content_fetchers *
recursive-include changedetectionio/model *
27  README.md

@@ -41,6 +41,20 @@ Using the **Browser Steps** configuration, add basic steps before performing cha
After **Browser Steps** have been run, then visit the **Visual Selector** tab to refine the content you're interested in.
Requires Playwright to be enabled.

### Awesome restock and price change notifications

Enable the _"Re-stock & Price detection for single product pages"_ option to activate the best way to monitor product pricing, this will extract any meta-data in the HTML page and give you many options to follow the pricing of the product.

Easily organise and monitor prices for products from the dashboard, get alerts and notifications when the price of a product changes or comes back in stock again!

[<img src="docs/restock-overview.png" style="max-width:100%;" alt="Easily keep an eye on product price changes directly from the UI" title="Easily keep an eye on product price changes directly from the UI" />](https://changedetection.io?src=github)

Set price change notification parameters, upper and lower price, price change percentage and more.
Always know when a product for sale drops in price.

[<img src="docs/restock-settings.png" style="max-width:100%;" alt="Set upper lower and percentage price change notification values" title="Set upper lower and percentage price change notification values" />](https://changedetection.io?src=github)

### Example use cases

@@ -91,6 +105,15 @@ We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) glob

Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/

### Schedule web page watches in any timezone, limit by day of week and time.

Easily set a re-check schedule, for example you could limit the web page change detection to only operate during business hours.
Or perhaps based on a foreign timezone (for example, you want to check for the latest news-headlines in a foreign country at 0900 AM),

<img src="./docs/scheduler.png" style="max-width:80%;" alt="How to monitor web page changes according to a schedule" title="How to monitor web page changes according to a schedule" />

Includes quick short-cut buttons to setup a schedule for **business hours only**, or **weekends**.

### We have a Chrome extension!

Easily add the current web page to your changedetection.io tool, simply install the extension and click "Sync" to connect it to your existing changedetection.io install.

@@ -272,6 +295,10 @@ I offer commercial support, this software is depended on by network security, ae
[release-link]: https://github.com/dgtlmoon/changedetection.io/releases
[docker-link]: https://hub.docker.com/r/dgtlmoon/changedetection.io

## Commercial Licencing

If you are reselling this software either in part or full as part of any commercial arrangement, you must abide by our COMMERCIAL_LICENCE.md found in our code repository, please contact dgtlmoon@gmail.com and contact@changedetection.io .

## Third-party licenses

changedetectionio.html_tools.elementpath_tostring: Copyright (c), 2018-2021, SISSA (Scuola Internazionale Superiore di Studi Avanzati), Licensed under [MIT license](https://github.com/sissaschool/elementpath/blob/master/LICENSE)
@@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3

# Only exists for direct CLI usage
@@ -1,8 +1,8 @@
#!/usr/bin/python3
#!/usr/bin/env python3

# Read more https://github.com/dgtlmoon/changedetection.io/wiki

__version__ = '0.45.25'
__version__ = '0.48.04'

from changedetectionio.strtobool import strtobool
from json.decoder import JSONDecodeError
@@ -160,11 +160,10 @@ def main():
)

# Monitored websites will not receive a Referer header when a user clicks on an outgoing link.
# @Note: Incompatible with password login (and maybe other features) for now, submit a PR!
@app.after_request
def hide_referrer(response):
    if strtobool(os.getenv("HIDE_REFERER", 'false')):
        response.headers["Referrer-Policy"] = "no-referrer"
        response.headers["Referrer-Policy"] = "same-origin"

    return response
@@ -12,9 +12,10 @@ import copy
# See docs/README.md for rebuilding the docs/apidoc information

from . import api_schema
from ..model import watch_base

# Build a JSON Schema atleast partially based on our Watch model
from changedetectionio.model.Watch import base_config as watch_base_config
watch_base_config = watch_base()
schema = api_schema.build_watch_json_schema(watch_base_config)

schema_create_watch = copy.deepcopy(schema)

@@ -57,7 +58,7 @@ class Watch(Resource):
abort(404, message='No watch exists with the UUID of {}'.format(uuid))

if request.args.get('recheck'):
    self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
    self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))
    return "OK", 200
if request.args.get('paused', '') == 'paused':
    self.datastore.data['watching'].get(uuid).pause()

@@ -245,7 +246,7 @@ class CreateWatch(Resource):

new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags)
if new_uuid:
    self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
    self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid}))
    return {'uuid': new_uuid}, 201
else:
    return "Invalid or unsupported URL", 400

@@ -302,7 +303,7 @@ class CreateWatch(Resource):

if request.args.get('recheck_all'):
    for uuid in self.datastore.data['watching'].keys():
        self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
        self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))
    return {'status': "OK"}, 200

return list, 200
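For orientation, the queue items above are created when the REST API is asked to recheck a watch. A minimal sketch of calling that endpoint from Python is shown below; the `/api/v1/watch/<uuid>` path and the `x-api-key` header follow the project's API documentation rather than this diff, and the host, key and UUID values are hypothetical.

```python
# Minimal sketch: ask a changedetection.io instance to recheck one watch (values are assumptions).
import requests

BASE = "http://localhost:5000/api/v1"                 # hypothetical instance
API_KEY = "your-api-key"                              # from the Settings UI
UUID = "00000000-0000-0000-0000-000000000000"         # an existing watch UUID

resp = requests.get(
    f"{BASE}/watch/{UUID}",
    params={"recheck": "1"},                          # handled by request.args.get('recheck') above
    headers={"x-api-key": API_KEY},
    timeout=30,
)
print(resp.status_code, resp.text)                    # expect a 200 once the item is queued
```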
82  changedetectionio/apprise_plugin/__init__.py (new file)

@@ -0,0 +1,82 @@
# include the decorator
from apprise.decorators import notify
from loguru import logger

@notify(on="delete")
@notify(on="deletes")
@notify(on="get")
@notify(on="gets")
@notify(on="post")
@notify(on="posts")
@notify(on="put")
@notify(on="puts")
def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs):
    import requests
    import json
    from urllib.parse import unquote_plus
    from apprise.utils import parse_url as apprise_parse_url
    from apprise import URLBase

    url = kwargs['meta'].get('url')

    if url.startswith('post'):
        r = requests.post
    elif url.startswith('get'):
        r = requests.get
    elif url.startswith('put'):
        r = requests.put
    elif url.startswith('delete'):
        r = requests.delete

    url = url.replace('post://', 'http://')
    url = url.replace('posts://', 'https://')
    url = url.replace('put://', 'http://')
    url = url.replace('puts://', 'https://')
    url = url.replace('get://', 'http://')
    url = url.replace('gets://', 'https://')
    url = url.replace('put://', 'http://')
    url = url.replace('puts://', 'https://')
    url = url.replace('delete://', 'http://')
    url = url.replace('deletes://', 'https://')

    headers = {}
    params = {}
    auth = None

    # Convert /foobar?+some-header=hello to proper header dictionary
    results = apprise_parse_url(url)
    if results:
        # Add our headers that the user can potentially over-ride if they wish
        # to to our returned result set and tidy entries by unquoting them
        headers = {unquote_plus(x): unquote_plus(y)
                   for x, y in results['qsd+'].items()}

        # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
        # In Apprise, it relies on prefixing each request arg with "-", because it uses say &method=update as a flag for apprise
        # but here we are making straight requests, so we need todo convert this against apprise's logic
        for k, v in results['qsd'].items():
            if not k.strip('+-') in results['qsd+'].keys():
                params[unquote_plus(k)] = unquote_plus(v)

        # Determine Authentication
        auth = ''
        if results.get('user') and results.get('password'):
            auth = (unquote_plus(results.get('user')), unquote_plus(results.get('user')))
        elif results.get('user'):
            auth = (unquote_plus(results.get('user')))

    # Try to auto-guess if it's JSON
    h = 'application/json; charset=utf-8'
    try:
        json.loads(body)
        headers['Content-Type'] = h
    except ValueError as e:
        logger.warning(f"Could not automatically add '{h}' header to the {kwargs['meta'].get('schema')}:// notification because the document failed to parse as JSON: {e}")
        pass

    r(results.get('url'),
      auth=auth,
      data=body.encode('utf-8') if type(body) is str else body,
      headers=headers,
      params=params
      )
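As a usage illustration (not part of the diff): the decorators above register `get://`, `post://`, `put://`, `delete://` and their TLS variants as Apprise schemes, so a notification URL can drive a plain webhook. A minimal sketch, assuming this module has been imported so the `@notify` hooks are registered; the endpoint and header value are hypothetical.

```python
# Minimal sketch: send a custom webhook notification through Apprise using the schemes above.
import apprise
import changedetectionio.apprise_plugin  # noqa: F401  (importing runs the @notify decorators)

a = apprise.Apprise()
# posts:// becomes an HTTPS POST; "?+x-api-key=..." is turned into a request header (see the qsd+ handling above)
a.add("posts://example.com/webhook?+x-api-key=secret-token")
a.notify(title="Price changed", body='{"watch": "example", "price": 9.99}')
```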
164  changedetectionio/blueprint/backups/__init__.py (new file)

@@ -0,0 +1,164 @@
import datetime
import glob
import threading

from flask import Blueprint, render_template, send_from_directory, flash, url_for, redirect, abort
import os

from changedetectionio.store import ChangeDetectionStore
from changedetectionio.flask_app import login_optionally_required
from loguru import logger

BACKUP_FILENAME_FORMAT = "changedetection-backup-{}.zip"


def create_backup(datastore_path, watches: dict):
    logger.debug("Creating backup...")
    import zipfile
    from pathlib import Path

    # create a ZipFile object
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    backupname = BACKUP_FILENAME_FORMAT.format(timestamp)
    backup_filepath = os.path.join(datastore_path, backupname)

    with zipfile.ZipFile(backup_filepath.replace('.zip', '.tmp'), "w",
                         compression=zipfile.ZIP_DEFLATED,
                         compresslevel=8) as zipObj:

        # Add the index
        zipObj.write(os.path.join(datastore_path, "url-watches.json"), arcname="url-watches.json")

        # Add the flask app secret
        zipObj.write(os.path.join(datastore_path, "secret.txt"), arcname="secret.txt")

        # Add any data in the watch data directory.
        for uuid, w in watches.items():
            for f in Path(w.watch_data_dir).glob('*'):
                zipObj.write(f,
                             # Use the full path to access the file, but make the file 'relative' in the Zip.
                             arcname=os.path.join(f.parts[-2], f.parts[-1]),
                             compress_type=zipfile.ZIP_DEFLATED,
                             compresslevel=8)

        # Create a list file with just the URLs, so it's easier to port somewhere else in the future
        list_file = "url-list.txt"
        with open(os.path.join(datastore_path, list_file), "w") as f:
            for uuid in watches:
                url = watches[uuid]["url"]
                f.write("{}\r\n".format(url))
        list_with_tags_file = "url-list-with-tags.txt"
        with open(
            os.path.join(datastore_path, list_with_tags_file), "w"
        ) as f:
            for uuid in watches:
                url = watches[uuid].get('url')
                tag = watches[uuid].get('tags', {})
                f.write("{} {}\r\n".format(url, tag))

        # Add it to the Zip
        zipObj.write(
            os.path.join(datastore_path, list_file),
            arcname=list_file,
            compress_type=zipfile.ZIP_DEFLATED,
            compresslevel=8,
        )
        zipObj.write(
            os.path.join(datastore_path, list_with_tags_file),
            arcname=list_with_tags_file,
            compress_type=zipfile.ZIP_DEFLATED,
            compresslevel=8,
        )

    # Now it's done, rename it so it shows up finally and its completed being written.
    os.rename(backup_filepath.replace('.zip', '.tmp'), backup_filepath.replace('.tmp', '.zip'))


def construct_blueprint(datastore: ChangeDetectionStore):
    backups_blueprint = Blueprint('backups', __name__, template_folder="templates")
    backup_threads = []

    @login_optionally_required
    @backups_blueprint.route("/request-backup", methods=['GET'])
    def request_backup():
        if any(thread.is_alive() for thread in backup_threads):
            flash("A backup is already running, check back in a few minutes", "error")
            return redirect(url_for('backups.index'))

        if len(find_backups()) > int(os.getenv("MAX_NUMBER_BACKUPS", 100)):
            flash("Maximum number of backups reached, please remove some", "error")
            return redirect(url_for('backups.index'))

        # Be sure we're written fresh
        datastore.sync_to_json()
        zip_thread = threading.Thread(target=create_backup, args=(datastore.datastore_path, datastore.data.get("watching")))
        zip_thread.start()
        backup_threads.append(zip_thread)
        flash("Backup building in background, check back in a few minutes.")

        return redirect(url_for('backups.index'))

    def find_backups():
        backup_filepath = os.path.join(datastore.datastore_path, BACKUP_FILENAME_FORMAT.format("*"))
        backups = glob.glob(backup_filepath)
        backup_info = []

        for backup in backups:
            size = os.path.getsize(backup) / (1024 * 1024)
            creation_time = os.path.getctime(backup)
            backup_info.append({
                'filename': os.path.basename(backup),
                'filesize': f"{size:.2f}",
                'creation_time': creation_time
            })

        backup_info.sort(key=lambda x: x['creation_time'], reverse=True)

        return backup_info

    @login_optionally_required
    @backups_blueprint.route("/download/<string:filename>", methods=['GET'])
    def download_backup(filename):
        import re
        filename = filename.strip()
        backup_filename_regex = BACKUP_FILENAME_FORMAT.format("\d+")

        full_path = os.path.join(os.path.abspath(datastore.datastore_path), filename)
        if not full_path.startswith(os.path.abspath(datastore.datastore_path)):
            abort(404)

        if filename == 'latest':
            backups = find_backups()
            filename = backups[0]['filename']

        if not re.match(r"^" + backup_filename_regex + "$", filename):
            abort(400)  # Bad Request if the filename doesn't match the pattern

        logger.debug(f"Backup download request for '{full_path}'")
        return send_from_directory(os.path.abspath(datastore.datastore_path), filename, as_attachment=True)

    @login_optionally_required
    @backups_blueprint.route("/", methods=['GET'])
    def index():
        backups = find_backups()
        output = render_template("overview.html",
                                 available_backups=backups,
                                 backup_running=any(thread.is_alive() for thread in backup_threads)
                                 )

        return output

    @login_optionally_required
    @backups_blueprint.route("/remove-backups", methods=['GET'])
    def remove_backups():

        backup_filepath = os.path.join(datastore.datastore_path, BACKUP_FILENAME_FORMAT.format("*"))
        backups = glob.glob(backup_filepath)
        for backup in backups:
            os.unlink(backup)

        flash("Backups were deleted.")

        return redirect(url_for('backups.index'))

    return backups_blueprint
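For context (not shown in this diff), a blueprint produced by `construct_blueprint()` still has to be attached to the Flask application; the real wiring lives in `changedetectionio/flask_app.py`. A minimal sketch, where `app`, `datastore` and the URL prefix are assumptions:

```python
# Minimal sketch: mounting the backups blueprint on an existing Flask app.
# "app" is the Flask application and "datastore" a ChangeDetectionStore; both are assumed to exist.
from changedetectionio.blueprint import backups

backups_bp = backups.construct_blueprint(datastore)
app.register_blueprint(backups_bp, url_prefix='/backups')   # serves /, /request-backup, /download/<file>, /remove-backups
```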
36  changedetectionio/blueprint/backups/templates/overview.html (new file)

@@ -0,0 +1,36 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.html' import render_simple_field, render_field %}
<div class="edit-form">
    <div class="box-wrap inner">
        <h4>Backups</h4>
        {% if backup_running %}
            <p>
                <strong>A backup is running!</strong>
            </p>
        {% endif %}
        <p>
            Here you can download and request a new backup, when a backup is completed you will see it listed below.
        </p>
        <br>
        {% if available_backups %}
            <ul>
                {% for backup in available_backups %}
                    <li><a href="{{ url_for('backups.download_backup', filename=backup["filename"]) }}">{{ backup["filename"] }}</a> {{ backup["filesize"] }} Mb</li>
                {% endfor %}
            </ul>
        {% else %}
            <p>
                <strong>No backups found.</strong>
            </p>
        {% endif %}

        <a class="pure-button pure-button-primary" href="{{ url_for('backups.request_backup') }}">Create backup</a>
        {% if available_backups %}
            <a class="pure-button button-small button-error " href="{{ url_for('backups.remove_backups') }}">Remove backups</a>
        {% endif %}
    </div>
</div>

{% endblock %}
@@ -85,7 +85,8 @@ def construct_blueprint(datastore: ChangeDetectionStore):
browsersteps_start_session['browserstepper'] = browser_steps.browsersteps_live_ui(
    playwright_browser=browsersteps_start_session['browser'],
    proxy=proxy,
    start_url=datastore.data['watching'][watch_uuid].get('url')
    start_url=datastore.data['watching'][watch_uuid].get('url'),
    headers=datastore.data['watching'][watch_uuid].get('headers')
)

# For test
@@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3

import os
import time

@@ -25,6 +25,7 @@ browser_step_ui_config = {'Choose one': '0 0',
'Click element if exists': '1 0',
'Click element': '1 0',
'Click element containing text': '0 1',
'Click element containing text if exists': '0 1',
'Enter text in field': '1 1',
'Execute JS': '0 1',
# 'Extract text and use as filter': '1 0',

@@ -96,12 +97,24 @@ class steppable_browser_interface():
return self.action_goto_url(value=self.start_url)

def action_click_element_containing_text(self, selector=None, value=''):
    logger.debug("Clicking element containing text")
    if not len(value.strip()):
        return
    elem = self.page.get_by_text(value)
    if elem.count():
        elem.first.click(delay=randint(200, 500), timeout=3000)

def action_click_element_containing_text_if_exists(self, selector=None, value=''):
    logger.debug("Clicking element containing text if exists")
    if not len(value.strip()):
        return
    elem = self.page.get_by_text(value)
    logger.debug(f"Clicking element containing text - {elem.count()} elements found")
    if elem.count():
        elem.first.click(delay=randint(200, 500), timeout=3000)
    else:
        return

def action_enter_text_in_field(self, selector, value):
    if not len(selector.strip()):
        return

@@ -256,7 +269,8 @@ class browsersteps_live_ui(steppable_browser_interface):
def get_current_state(self):
    """Return the screenshot and interactive elements mapping, generally always called after action_()"""
    import importlib.resources
    xpath_element_js = importlib.resources.read_text("changedetectionio.content_fetchers.res", "xpath_element_scraper.js")
    xpath_element_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('xpath_element_scraper.js').read_text()

    now = time.time()
    self.page.wait_for_timeout(1 * 1000)

@@ -289,7 +303,7 @@ class browsersteps_live_ui(steppable_browser_interface):
    """
    import importlib.resources
    self.page.evaluate("var include_filters=''")
    xpath_element_js = importlib.resources.read_text("changedetectionio.content_fetchers.res", "xpath_element_scraper.js")
    xpath_element_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('xpath_element_scraper.js').read_text()
    from changedetectionio.content_fetchers import visualselector_xpath_selectors
    xpath_element_js = xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors)
    xpath_data = self.page.evaluate("async () => {" + xpath_element_js + "}")
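The substitution above (and the same change in the fetcher base class further down) swaps the legacy `importlib.resources.read_text()` helper for the `files()` API. A small self-contained comparison, using the same package and resource names as the diff; the deprecation note reflects upstream Python documentation rather than anything in this change.

```python
import importlib.resources

# Legacy helper (deprecated in newer Python releases):
# js = importlib.resources.read_text("changedetectionio.content_fetchers.res", "xpath_element_scraper.js")

# files()-based replacement used in the new code:
js = (importlib.resources.files("changedetectionio.content_fetchers.res")
      .joinpath("xpath_element_scraper.js")
      .read_text(encoding="utf-8"))
```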
@@ -1,4 +1,7 @@
import importlib
from concurrent.futures import ThreadPoolExecutor

from changedetectionio.processors.text_json_diff.processor import FilterNotFoundInResponse
from changedetectionio.store import ChangeDetectionStore

from functools import wraps

@@ -30,7 +33,6 @@ def construct_blueprint(datastore: ChangeDetectionStore):
def long_task(uuid, preferred_proxy):
    import time
    from changedetectionio.content_fetchers import exceptions as content_fetcher_exceptions
    from changedetectionio.processors import text_json_diff
    from changedetectionio.safe_jinja import render as jinja_render

    status = {'status': '', 'length': 0, 'text': ''}

@@ -38,8 +40,12 @@ def construct_blueprint(datastore: ChangeDetectionStore):
contents = ''
now = time.time()
try:
    update_handler = text_json_diff.perform_site_check(datastore=datastore, watch_uuid=uuid)
    update_handler.call_browser()
    processor_module = importlib.import_module("changedetectionio.processors.text_json_diff.processor")
    update_handler = processor_module.perform_site_check(datastore=datastore,
                                                         watch_uuid=uuid
                                                         )

    update_handler.call_browser(preferred_proxy_id=preferred_proxy)
    # title, size is len contents not len xfer
except content_fetcher_exceptions.Non200ErrorCodeReceived as e:
    if e.status_code == 404:

@@ -48,7 +54,7 @@ def construct_blueprint(datastore: ChangeDetectionStore):
    status.update({'status': 'ERROR', 'length': len(contents), 'text': f"{e.status_code} - Access denied"})
else:
    status.update({'status': 'ERROR', 'length': len(contents), 'text': f"Status code: {e.status_code}"})
except text_json_diff.FilterNotFoundInResponse:
except FilterNotFoundInResponse:
    status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but CSS/xPath filter not found (page changed layout?)"})
except content_fetcher_exceptions.EmptyReply as e:
    if e.status_code == 403 or e.status_code == 401:

@@ -17,7 +17,9 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q: PriorityQueue
@price_data_follower_blueprint.route("/<string:uuid>/accept", methods=['GET'])
def accept(uuid):
    datastore.data['watching'][uuid]['track_ldjson_price_data'] = PRICE_DATA_TRACK_ACCEPT
    update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
    datastore.data['watching'][uuid]['processor'] = 'restock_diff'
    datastore.data['watching'][uuid].clear_watch()
    update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))
    return redirect(url_for("index"))

@login_required
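The change above replaces a hard import of `text_json_diff` with `importlib.import_module()`, so the processor implementation is resolved from a dotted path at call time. A minimal sketch of the same pattern; the module path and `perform_site_check` name are taken from the diff, while the small helper and its argument are illustrative.

```python
import importlib

def load_perform_site_check(processor_name: str):
    """Resolve a processor's perform_site_check callable by name (hypothetical helper)."""
    module = importlib.import_module(f"changedetectionio.processors.{processor_name}.processor")
    return module.perform_site_check

# As in the diff: the text/JSON diff processor.
perform_site_check = load_perform_site_check("text_json_diff")
```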
@@ -1,4 +1,6 @@
from flask import Blueprint, request, make_response, render_template, flash, url_for, redirect
from flask import Blueprint, request, render_template, flash, url_for, redirect

from changedetectionio.store import ChangeDetectionStore
from changedetectionio.flask_app import login_optionally_required

@@ -11,6 +13,7 @@ def construct_blueprint(datastore: ChangeDetectionStore):
def tags_overview_page():
    from .form import SingleTag
    add_form = SingleTag(request.form)

    sorted_tags = sorted(datastore.data['settings']['application'].get('tags').items(), key=lambda x: x[1]['title'])

    from collections import Counter

@@ -96,22 +99,57 @@ def construct_blueprint(datastore: ChangeDetectionStore):
@tags_blueprint.route("/edit/<string:uuid>", methods=['GET'])
@login_optionally_required
def form_tag_edit(uuid):
    from changedetectionio import forms

    from changedetectionio.blueprint.tags.form import group_restock_settings_form
    if uuid == 'first':
        uuid = list(datastore.data['settings']['application']['tags'].keys()).pop()

    default = datastore.data['settings']['application']['tags'].get(uuid)

    form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
                           data=default,
                           )
    form.datastore=datastore # needed?
    form = group_restock_settings_form(
        formdata=request.form if request.method == 'POST' else None,
        data=default,
        extra_notification_tokens=datastore.get_unique_notification_tokens_available(),
        default_system_settings = datastore.data['settings'],
    )

    template_args = {
        'data': default,
        'form': form,
        'watch': default,
        'extra_notification_token_placeholder_info': datastore.get_unique_notification_token_placeholders_available(),
    }

    included_content = {}
    if form.extra_form_content():
        # So that the extra panels can access _helpers.html etc, we set the environment to load from templates/
        # And then render the code from the module
        from jinja2 import Environment, FileSystemLoader
        import importlib.resources
        templates_dir = str(importlib.resources.files("changedetectionio").joinpath('templates'))
        env = Environment(loader=FileSystemLoader(templates_dir))
        template_str = """{% from '_helpers.html' import render_field, render_checkbox_field, render_button %}
        <script>
            $(document).ready(function () {
                toggleOpacity('#overrides_watch', '#restock-fieldset-price-group', true);
            });
        </script>
        <fieldset>
            <div class="pure-control-group">
                <fieldset class="pure-group">
                    {{ render_checkbox_field(form.overrides_watch) }}
                    <span class="pure-form-message-inline">Used for watches in "Restock & Price detection" mode</span>
                </fieldset>
            </fieldset>
        """
        template_str += form.extra_form_content()
        template = env.from_string(template_str)
        included_content = template.render(**template_args)

    output = render_template("edit-tag.html",
                             data=default,
                             form=form,
                             settings_application=datastore.data['settings']['application'],
                             extra_tab_content=form.extra_tab_content() if form.extra_tab_content() else None,
                             extra_form_content=included_content,
                             **template_args
                             )

    return output

@@ -120,14 +158,15 @@ def construct_blueprint(datastore: ChangeDetectionStore):
@tags_blueprint.route("/edit/<string:uuid>", methods=['POST'])
@login_optionally_required
def form_tag_edit_submit(uuid):
    from changedetectionio import forms
    from changedetectionio.blueprint.tags.form import group_restock_settings_form
    if uuid == 'first':
        uuid = list(datastore.data['settings']['application']['tags'].keys()).pop()

    default = datastore.data['settings']['application']['tags'].get(uuid)

    form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
    form = group_restock_settings_form(formdata=request.form if request.method == 'POST' else None,
                           data=default,
                           extra_notification_tokens=datastore.get_unique_notification_tokens_available()
                           )
    # @todo subclass form so validation works
    #if not form.validate():

@@ -136,6 +175,7 @@ def construct_blueprint(datastore: ChangeDetectionStore):
    # return redirect(url_for('tags.form_tag_edit_submit', uuid=uuid))

    datastore.data['settings']['application']['tags'][uuid].update(form.data)
    datastore.data['settings']['application']['tags'][uuid]['processor'] = 'restock_diff'
    datastore.needs_write_urgent = True
    flash("Updated")
@@ -1,16 +1,15 @@
from wtforms import (
    BooleanField,
    Form,
    IntegerField,
    RadioField,
    SelectField,
    StringField,
    SubmitField,
    TextAreaField,
    validators,
)
from wtforms.fields.simple import BooleanField

from changedetectionio.processors.restock_diff.forms import processor_settings_form as restock_settings_form

class group_restock_settings_form(restock_settings_form):
    overrides_watch = BooleanField('Activate for individual watches in this tag/group?', default=False)

class SingleTag(Form):
@@ -17,7 +17,6 @@
</script>

<script src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script>
<!--<script src="{{url_for('static_content', group='js', filename='limit.js')}}" defer></script>-->
<script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>

<div class="edit-form monospaced-textarea">

@@ -26,6 +25,9 @@
<ul>
<li class="tab" id=""><a href="#general">General</a></li>
<li class="tab"><a href="#filters-and-triggers">Filters & Triggers</a></li>
{% if extra_tab_content %}
<li class="tab"><a href="#extras_tab">{{ extra_tab_content }}</a></li>
{% endif %}
<li class="tab"><a href="#notifications">Notifications</a></li>
</ul>
</div>

@@ -55,9 +57,9 @@ xpath://body/div/span[contains(@class, 'example-class')]",
{% if '/text()' in field %}
<span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the <element> contains <![CDATA[]]></strong></span><br>
{% endif %}
<span class="pure-form-message-inline">One rule per line, <i>any</i> rules that matches will be used.<br>

<ul>
<span class="pure-form-message-inline">One CSS, xPath, JSON Path/JQ selector per line, <i>any</i> rules that matches will be used.<br>
<div data-target="#advanced-help-selectors" class="toggle-show pure-button button-tag button-xsmall">Show advanced help and tips</div>
<ul id="advanced-help-selectors">
<li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
<li>JSON - Limit text to this JSON rule, using either <a href="https://pypi.org/project/jsonpath-ng/" target="new">JSONPath</a> or <a href="https://stedolan.github.io/jq/" target="new">jq</a> (if installed).
<ul>

@@ -86,17 +88,25 @@ xpath://body/div/span[contains(@class, 'example-class')]",
{{ render_field(form.subtractive_selectors, rows=5, placeholder="header
footer
nav
.stockticker") }}
.stockticker
//*[contains(text(), 'Advertisement')]") }}
<span class="pure-form-message-inline">
<ul>
<li> Remove HTML element(s) by CSS selector before text conversion. </li>
<li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li>
<li> Remove HTML element(s) by CSS and XPath selectors before text conversion. </li>
<li> Don't paste HTML here, use only CSS and XPath selectors </li>
<li> Add multiple elements, CSS or XPath selectors per line to ignore multiple parts of the HTML. </li>
</ul>
</span>
</fieldset>

</div>

{# rendered sub Template #}
{% if extra_form_content %}
<div class="tab-pane-inner" id="extras_tab">
{{ extra_form_content|safe }}
</div>
{% endif %}
<div class="tab-pane-inner" id="notifications">
<fieldset>
<div class="pure-control-group inline-radio">

@@ -119,7 +129,7 @@ nav
{% endif %}
<a href="#notifications" id="notification-setting-reset-to-default" class="pure-button button-xsmall" style="right: 20px; top: 20px; position: absolute; background-color: #5f42dd; border-radius: 4px; font-size: 70%; color: #fff">Use system defaults</a>

{{ render_common_settings_form(form, emailprefix, settings_application) }}
{{ render_common_settings_form(form, emailprefix, settings_application, extra_notification_token_placeholder_info) }}
</div>
</fieldset>
</div>
@@ -4,7 +4,9 @@ from loguru import logger
from changedetectionio.content_fetchers.exceptions import BrowserStepsStepException
import os

visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4,header,footer,section,article,aside,details,main,nav,section,summary'
# Visual Selector scraper - 'Button' is there because some sites have <button>OUT OF STOCK</button>.
visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4,header,footer,section,article,aside,details,main,nav,section,summary,button'

# available_fetchers() will scan this implementation looking for anything starting with html_
# this information is used in the form selections
@@ -65,8 +65,8 @@ class Fetcher():

def __init__(self):
    import importlib.resources
    self.xpath_element_js = importlib.resources.read_text("changedetectionio.content_fetchers.res", 'xpath_element_scraper.js')
    self.instock_data_js = importlib.resources.read_text("changedetectionio.content_fetchers.res", 'stock-not-in-stock.js')
    self.xpath_element_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('xpath_element_scraper.js').read_text(encoding='utf-8')
    self.instock_data_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('stock-not-in-stock.js').read_text(encoding='utf-8')

@abstractmethod
def get_error(self):

@@ -81,7 +81,8 @@ class Fetcher():
        request_method,
        ignore_status_codes=False,
        current_include_filters=None,
        is_binary=False):
        is_binary=False,
        empty_pages_are_a_change=False):
    # Should set self.error, self.status_code and self.content
    pass

@@ -95,6 +96,9 @@ class Fetcher():

@abstractmethod
def screenshot_step(self, step_n):
    if self.browser_steps_screenshot_path and not os.path.isdir(self.browser_steps_screenshot_path):
        logger.debug(f"> Creating data dir {self.browser_steps_screenshot_path}")
        os.mkdir(self.browser_steps_screenshot_path)
    return None

@abstractmethod

@@ -168,5 +172,8 @@ class Fetcher():
    if os.path.isfile(f):
        os.unlink(f)

def save_step_html(self, param):
def save_step_html(self, step_n):
    if self.browser_steps_screenshot_path and not os.path.isdir(self.browser_steps_screenshot_path):
        logger.debug(f"> Creating data dir {self.browser_steps_screenshot_path}")
        os.mkdir(self.browser_steps_screenshot_path)
    pass
@@ -1,6 +1,5 @@
from loguru import logger


class Non200ErrorCodeReceived(Exception):
    def __init__(self, status_code, url, screenshot=None, xpath_data=None, page_html=None):
        # Set this so we can use it in other parts of the app

@@ -81,7 +80,7 @@ class ScreenshotUnavailable(Exception):
        self.status_code = status_code
        self.url = url
        if page_html:
            from html_tools import html_to_text
            from changedetectionio.html_tools import html_to_text
            self.page_text = html_to_text(page_html)
        return
@@ -58,6 +58,7 @@ class fetcher(Fetcher):
|
||||
self.proxy['password'] = parsed.password
|
||||
|
||||
def screenshot_step(self, step_n=''):
|
||||
super().screenshot_step(step_n=step_n)
|
||||
screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=int(os.getenv("SCREENSHOT_QUALITY", 72)))
|
||||
|
||||
if self.browser_steps_screenshot_path is not None:
|
||||
@@ -67,6 +68,7 @@ class fetcher(Fetcher):
|
||||
f.write(screenshot)
|
||||
|
||||
def save_step_html(self, step_n):
|
||||
super().save_step_html(step_n=step_n)
|
||||
content = self.page.content()
|
||||
destination = os.path.join(self.browser_steps_screenshot_path, 'step_{}.html'.format(step_n))
|
||||
logger.debug(f"Saving step HTML to {destination}")
|
||||
@@ -81,7 +83,8 @@ class fetcher(Fetcher):
|
||||
request_method,
|
||||
ignore_status_codes=False,
|
||||
current_include_filters=None,
|
||||
is_binary=False):
|
||||
is_binary=False,
|
||||
empty_pages_are_a_change=False):
|
||||
|
||||
from playwright.sync_api import sync_playwright
|
||||
import playwright._impl._errors
|
||||
@@ -128,7 +131,7 @@ class fetcher(Fetcher):
|
||||
if response is None:
|
||||
context.close()
|
||||
browser.close()
|
||||
logger.debug("Content Fetcher > Response object was none")
|
||||
logger.debug("Content Fetcher > Response object from the browser communication was none")
|
||||
raise EmptyReply(url=url, status_code=None)
|
||||
|
||||
try:
|
||||
@@ -164,10 +167,10 @@ class fetcher(Fetcher):
|
||||
|
||||
raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, screenshot=screenshot)
|
||||
|
||||
if len(self.page.content().strip()) == 0:
|
||||
if not empty_pages_are_a_change and len(self.page.content().strip()) == 0:
|
||||
logger.debug("Content Fetcher > Content was empty, empty_pages_are_a_change = False")
|
||||
context.close()
|
||||
browser.close()
|
||||
logger.debug("Content Fetcher > Content was empty")
|
||||
raise EmptyReply(url=url, status_code=response.status)
|
||||
|
||||
# Run Browser Steps here
|
||||
|
||||
@@ -75,7 +75,8 @@ class fetcher(Fetcher):
request_method,
ignore_status_codes,
current_include_filters,
is_binary
is_binary,
empty_pages_are_a_change
):

from changedetectionio.content_fetchers import visualselector_xpath_selectors
@@ -153,7 +154,7 @@ class fetcher(Fetcher):
if response is None:
await self.page.close()
await browser.close()
logger.warning("Content Fetcher > Response object was none")
logger.warning("Content Fetcher > Response object was none (as in, the response from the browser was empty, not just the content)")
raise EmptyReply(url=url, status_code=None)

self.headers = response.headers
@@ -186,10 +187,11 @@ class fetcher(Fetcher):

raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, screenshot=screenshot)
content = await self.page.content
if len(content.strip()) == 0:

if not empty_pages_are_a_change and len(content.strip()) == 0:
logger.error("Content Fetcher > Content was empty (empty_pages_are_a_change is False), closing browsers")
await self.page.close()
await browser.close()
logger.error("Content Fetcher > Content was empty")
raise EmptyReply(url=url, status_code=response.status)

# Run Browser Steps here
@@ -247,7 +249,7 @@ class fetcher(Fetcher):
await self.fetch_page(**kwargs)

def run(self, url, timeout, request_headers, request_body, request_method, ignore_status_codes=False,
current_include_filters=None, is_binary=False):
current_include_filters=None, is_binary=False, empty_pages_are_a_change=False):

#@todo make update_worker async which could run any of these content_fetchers within memory and time constraints
max_time = os.getenv('PUPPETEER_MAX_PROCESSING_TIMEOUT_SECONDS', 180)
@@ -262,7 +264,8 @@ class fetcher(Fetcher):
request_method=request_method,
ignore_status_codes=ignore_status_codes,
current_include_filters=current_include_filters,
is_binary=is_binary
is_binary=is_binary,
empty_pages_are_a_change=empty_pages_are_a_change
), timeout=max_time))
except asyncio.TimeoutError:
raise(BrowserFetchTimedOut(msg=f"Browser connected but was unable to process the page in {max_time} seconds."))
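Aside (not part of the diff): the puppeteer fetcher's run() above wraps its async work in asyncio.run() / asyncio.wait_for(), with PUPPETEER_MAX_PROCESSING_TIMEOUT_SECONDS as the ceiling before BrowserFetchTimedOut is raised. A generic, runnable sketch of that timeout pattern, using stand-in names rather than project code:

# Generic illustration only - the coroutine and error message are stand-ins, not project code.
import asyncio

async def fetch_page():
    await asyncio.sleep(1)          # stand-in for the real browser round-trip
    return "<html></html>"

async def main(max_time=180):
    try:
        # Give up if the whole fetch has not completed within max_time seconds
        return await asyncio.wait_for(fetch_page(), timeout=max_time)
    except asyncio.TimeoutError:
        raise RuntimeError(f"Browser connected but was unable to process the page in {max_time} seconds")

print(asyncio.run(main(max_time=5)))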
@@ -1,9 +1,7 @@
from loguru import logger
import hashlib
import os

import chardet
import requests

from changedetectionio import strtobool
from changedetectionio.content_fetchers.exceptions import BrowserStepsInUnsupportedFetcher, EmptyReply, Non200ErrorCodeReceived
from changedetectionio.content_fetchers.base import Fetcher

@@ -25,7 +23,11 @@ class fetcher(Fetcher):
request_method,
ignore_status_codes=False,
current_include_filters=None,
is_binary=False):
is_binary=False,
empty_pages_are_a_change=False):

import chardet
import requests

if self.browser_steps_get_valid_steps():
raise BrowserStepsInUnsupportedFetcher(url=url)
@@ -45,13 +47,19 @@ class fetcher(Fetcher):
if self.system_https_proxy:
proxies['https'] = self.system_https_proxy

r = requests.request(method=request_method,
data=request_body,
url=url,
headers=request_headers,
timeout=timeout,
proxies=proxies,
verify=False)
session = requests.Session()

if strtobool(os.getenv('ALLOW_FILE_URI', 'false')) and url.startswith('file://'):
from requests_file import FileAdapter
session.mount('file://', FileAdapter())

r = session.request(method=request_method,
data=request_body.encode('utf-8') if type(request_body) is str else request_body,
url=url,
headers=request_headers,
timeout=timeout,
proxies=proxies,
verify=False)

# If the response did not tell us what encoding format to expect, Then use chardet to override what `requests` thinks.
# For example - some sites don't tell us it's utf-8, but return utf-8 content
@@ -67,7 +75,11 @@ class fetcher(Fetcher):
self.headers = r.headers

if not r.content or not len(r.content):
raise EmptyReply(url=url, status_code=r.status_code)
logger.debug(f"Requests returned empty content for '{url}'")
if not empty_pages_are_a_change:
raise EmptyReply(url=url, status_code=r.status_code)
else:
logger.debug(f"URL {url} gave zero byte content reply with Status Code {r.status_code}, but empty_pages_are_a_change = True")

# @todo test this
# @todo maybe you really want to test zero-byte return pages?
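For orientation (not part of the diff): every fetcher's run() now accepts empty_pages_are_a_change, so the caller decides whether a zero-byte reply is an error (EmptyReply) or a legitimate change. A minimal caller-side sketch against the signature shown above; the per-watch settings key is an illustrative assumption:

# Hypothetical caller - the 'empty_pages_are_a_change' watch key is an assumption for illustration.
from changedetectionio.content_fetchers.exceptions import EmptyReply

def fetch_for_watch(fetcher, watch):
    try:
        fetcher.run(url=watch['url'],
                    timeout=15,
                    request_headers={},
                    request_body=None,
                    request_method='GET',
                    ignore_status_codes=False,
                    current_include_filters=None,
                    is_binary=False,
                    # New flag from this change-set: keep going on a zero-byte reply
                    empty_pages_are_a_change=watch.get('empty_pages_are_a_change', False))
    except EmptyReply:
        # Only raised when empty_pages_are_a_change is False
        return False
    return True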
@@ -30,6 +30,8 @@ function isItemInStock() {
'dieser artikel ist bald wieder verfügbar',
'dostępne wkrótce',
'en rupture de stock',
'esgotado',
'indisponível',
'isn\'t in stock right now',
'isnt in stock right now',
'isn’t in stock right now',
@@ -37,6 +39,7 @@ function isItemInStock() {
'let me know when it\'s available',
'mail me when available',
'message if back in stock',
'mevcut değil',
'nachricht bei',
'nicht auf lager',
'nicht lagernd',
@@ -48,7 +51,7 @@ function isItemInStock() {
'niet beschikbaar',
'niet leverbaar',
'niet op voorraad',
'no disponible temporalmente',
'no disponible',
'no longer in stock',
'no tickets available',
'not available',
@@ -57,6 +60,7 @@ function isItemInStock() {
'notify me when available',
'notify me',
'notify when available',
'não disponível',
'não estamos a aceitar encomendas',
'out of stock',
'out-of-stock',
@@ -64,17 +68,20 @@ function isItemInStock() {
'produkt niedostępny',
'sold out',
'sold-out',
'stokta yok',
'temporarily out of stock',
'temporarily unavailable',
'there were no search results for',
'this item is currently unavailable',
'tickets unavailable',
'tijdelijk uitverkocht',
'tükendi',
'unavailable nearby',
'unavailable tickets',
'vergriffen',
'vorbestellen',
'vorbestellung ist bald möglich',
'we don\'t currently have any',
'we couldn\'t find any products that match',
'we do not currently have an estimate of when this product will be back in stock.',
'we don\'t know when or if this item will be back in stock.',
@@ -153,10 +160,14 @@ function isItemInStock() {
}

elementText = "";
if (element.tagName.toLowerCase() === "input") {
elementText = element.value.toLowerCase().trim();
} else {
elementText = getElementBaseText(element);
try {
if (element.tagName.toLowerCase() === "input") {
elementText = element.value.toLowerCase().trim();
} else {
elementText = getElementBaseText(element);
}
} catch (e) {
console.warn('stock-not-in-stock.js scraper - handling element for gettext failed', e);
}

if (elementText.length) {
@@ -173,7 +184,8 @@ function isItemInStock() {
const element = elementsToScan[i];
// outside the 'fold' or some weird text in the heading area
// .getBoundingClientRect() was causing a crash in chrome 119, can only be run on contentVisibility != hidden
if (element.getBoundingClientRect().top + window.scrollY >= vh + 150 || element.getBoundingClientRect().top + window.scrollY <= 100) {
// Note: theres also an automated test that places the 'out of stock' text fairly low down
if (element.getBoundingClientRect().top + window.scrollY >= vh + 250 || element.getBoundingClientRect().top + window.scrollY <= 100) {
continue
}
elementText = "";
@@ -187,7 +199,7 @@ function isItemInStock() {
// and these mean its out of stock
for (const outOfStockText of outOfStockTexts) {
if (elementText.includes(outOfStockText)) {
console.log(`Selected 'Out of Stock' - found text "${outOfStockText}" - "${elementText}"`)
console.log(`Selected 'Out of Stock' - found text "${outOfStockText}" - "${elementText}" - offset top ${element.getBoundingClientRect().top}, page height is ${vh}`)
return outOfStockText; // item is out of stock
}
}
@@ -164,6 +164,15 @@ visibleElementsArray.forEach(function (element) {
}
}

let label = "not-interesting" // A placeholder, the actual labels for training are done by hand for now

let text = element.textContent.trim().slice(0, 30).trim();
while (/\n{2,}|\t{2,}/.test(text)) {
text = text.replace(/\n{2,}/g, '\n').replace(/\t{2,}/g, '\t')
}

// Try to identify any possible currency amounts "Sale: 4000" or "Sale now 3000 Kc", can help with the training.
const hasDigitCurrency = (/\d/.test(text.slice(0, 6)) || /\d/.test(text.slice(-6)) ) && /([€£$¥₩₹]|USD|AUD|EUR|Kč|kr|SEK|,–)/.test(text) ;

size_pos.push({
xpath: xpath_result,
@@ -171,9 +180,16 @@ visibleElementsArray.forEach(function (element) {
height: Math.round(bbox['height']),
left: Math.floor(bbox['left']),
top: Math.floor(bbox['top']) + scroll_y,
// tagName used by Browser Steps
tagName: (element.tagName) ? element.tagName.toLowerCase() : '',
// tagtype used by Browser Steps
tagtype: (element.tagName.toLowerCase() === 'input' && element.type) ? element.type.toLowerCase() : '',
isClickable: window.getComputedStyle(element).cursor == "pointer"
isClickable: window.getComputedStyle(element).cursor === "pointer",
// Used by the keras trainer
fontSize: window.getComputedStyle(element).getPropertyValue('font-size'),
fontWeight: window.getComputedStyle(element).getPropertyValue('font-weight'),
hasDigitCurrency: hasDigitCurrency,
label: label,
});

});
@@ -214,7 +230,7 @@ if (include_filters.length) {
console.log(e);
}

if (results.length) {
if (results != null && results.length) {

// Iterate over the results
results.forEach(node => {

@@ -56,7 +56,8 @@ class fetcher(Fetcher):
request_method,
ignore_status_codes=False,
current_include_filters=None,
is_binary=False):
is_binary=False,
empty_pages_are_a_change=False):

from selenium import webdriver
from selenium.webdriver.chrome.options import Options as ChromeOptions

@@ -1,6 +1,9 @@
import difflib
from typing import List, Iterator, Union

REMOVED_STYLE = "background-color: #fadad7; color: #b30000;"
ADDED_STYLE = "background-color: #eaf2c2; color: #406619;"

def same_slicer(lst: List[str], start: int, end: int) -> List[str]:
"""Return a slice of the list, or a single element if start == end."""
return lst[start:end] if start != end else [lst[start]]
@@ -12,11 +15,12 @@ def customSequenceMatcher(
include_removed: bool = True,
include_added: bool = True,
include_replaced: bool = True,
include_change_type_prefix: bool = True
include_change_type_prefix: bool = True,
html_colour: bool = False
) -> Iterator[List[str]]:
"""
Compare two sequences and yield differences based on specified parameters.

Args:
before (List[str]): Original sequence
after (List[str]): Modified sequence
@@ -25,26 +29,35 @@ def customSequenceMatcher(
include_added (bool): Include added parts
include_replaced (bool): Include replaced parts
include_change_type_prefix (bool): Add prefixes to indicate change types

html_colour (bool): Use HTML background colors for differences

Yields:
List[str]: Differences between sequences
"""
cruncher = difflib.SequenceMatcher(isjunk=lambda x: x in " \t", a=before, b=after)

for tag, alo, ahi, blo, bhi in cruncher.get_opcodes():
if include_equal and tag == 'equal':
yield before[alo:ahi]
elif include_removed and tag == 'delete':
prefix = "(removed) " if include_change_type_prefix else ''
yield [f"{prefix}{line}" for line in same_slicer(before, alo, ahi)]
if html_colour:
yield [f'<span style="{REMOVED_STYLE}">{line}</span>' for line in same_slicer(before, alo, ahi)]
else:
yield [f"(removed) {line}" for line in same_slicer(before, alo, ahi)] if include_change_type_prefix else same_slicer(before, alo, ahi)
elif include_replaced and tag == 'replace':
prefix_changed = "(changed) " if include_change_type_prefix else ''
prefix_into = "(into) " if include_change_type_prefix else ''
yield [f"{prefix_changed}{line}" for line in same_slicer(before, alo, ahi)] + \
[f"{prefix_into}{line}" for line in same_slicer(after, blo, bhi)]
if html_colour:
yield [f'<span style="{REMOVED_STYLE}">{line}</span>' for line in same_slicer(before, alo, ahi)] + \
[f'<span style="{ADDED_STYLE}">{line}</span>' for line in same_slicer(after, blo, bhi)]
else:
yield [f"(changed) {line}" for line in same_slicer(before, alo, ahi)] + \
[f"(into) {line}" for line in same_slicer(after, blo, bhi)] if include_change_type_prefix else same_slicer(before, alo, ahi) + same_slicer(after, blo, bhi)
elif include_added and tag == 'insert':
prefix = "(added) " if include_change_type_prefix else ''
yield [f"{prefix}{line}" for line in same_slicer(after, blo, bhi)]
if html_colour:
yield [f'<span style="{ADDED_STYLE}">{line}</span>' for line in same_slicer(after, blo, bhi)]
else:
yield [f"(added) {line}" for line in same_slicer(after, blo, bhi)] if include_change_type_prefix else same_slicer(after, blo, bhi)

def render_diff(
previous_version_file_contents: str,
@@ -55,11 +68,12 @@ def render_diff(
include_replaced: bool = True,
line_feed_sep: str = "\n",
include_change_type_prefix: bool = True,
patch_format: bool = False
patch_format: bool = False,
html_colour: bool = False
) -> str:
"""
Render the difference between two file contents.

Args:
previous_version_file_contents (str): Original file contents
newest_version_file_contents (str): Modified file contents
@@ -70,7 +84,8 @@ def render_diff(
line_feed_sep (str): Separator for lines in output
include_change_type_prefix (bool): Add prefixes to indicate change types
patch_format (bool): Use patch format for output

html_colour (bool): Use HTML background colors for differences

Returns:
str: Rendered difference
"""
@@ -88,10 +103,11 @@ def render_diff(
include_removed=include_removed,
include_added=include_added,
include_replaced=include_replaced,
include_change_type_prefix=include_change_type_prefix
include_change_type_prefix=include_change_type_prefix,
html_colour=html_colour
)

def flatten(lst: List[Union[str, List[str]]]) -> str:
return line_feed_sep.join(flatten(x) if isinstance(x, list) else x for x in lst)

return flatten(rendered_diff)
return flatten(rendered_diff)
|
||||
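Quick usage note (not from the diff): with the new html_colour flag, render_diff() can emit inline-styled <span> markup using the REMOVED_STYLE / ADDED_STYLE constants defined above instead of the "(removed)/(added)" text prefixes. A small sketch using only the keyword arguments visible in this hunk, assuming the module lives at changedetectionio.diff as in the source tree; argument values are examples only:

# Illustrative call - input strings and separator are example values.
from changedetectionio.diff import render_diff

before = "price: 10.00\nin stock"
after = "price: 12.00\nin stock"

html_fragment = render_diff(previous_version_file_contents=before,
                            newest_version_file_contents=after,
                            line_feed_sep="<br>",
                            html_colour=True)
print(html_fragment)   # changed lines arrive wrapped in styled <span> elements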
@@ -1,18 +1,24 @@
#!/usr/bin/python3
#!/usr/bin/env python3

import datetime
from zoneinfo import ZoneInfo

import flask_login
import locale
import os
import pytz
import queue
import threading
import time
import timeago

from .processors import find_processors, get_parent_module, get_custom_watch_obj_for_processor
from .safe_jinja import render as jinja_render
from changedetectionio.strtobool import strtobool
from copy import deepcopy
from functools import wraps
from threading import Event
import flask_login
import pytz
import timeago

from feedgen.feed import FeedGenerator
from flask import (
Flask,
@@ -37,6 +43,7 @@ from loguru import logger
from changedetectionio import html_tools, __version__
from changedetectionio import queuedWatchMetaData
from changedetectionio.api import api_v1
from .time_handler import is_within_schedule

datastore = None

@@ -48,6 +55,7 @@ extra_stylesheets = []

update_q = queue.PriorityQueue()
notification_q = queue.Queue()
MAX_QUEUE_SIZE = 2000

app = Flask(__name__,
static_url_path="",
@@ -62,7 +70,6 @@ FlaskCompress(app)

# Stop browser caching of assets
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0

app.config.exit = Event()

app.config['NEW_VERSION_AVAILABLE'] = False
@@ -79,6 +86,14 @@ csrf = CSRFProtect()
csrf.init_app(app)
notification_debug_log=[]

# Locale for correct presentation of prices etc
default_locale = locale.getdefaultlocale()
logger.info(f"System locale default is {default_locale}")
try:
locale.setlocale(locale.LC_ALL, default_locale)
except locale.Error:
logger.warning(f"Unable to set locale {default_locale}, locale is not installed maybe?")

watch_api = Api(app, decorators=[csrf.exempt])

def init_app_secret(datastore_path):
@@ -108,6 +123,14 @@ def get_darkmode_state():
def get_css_version():
return __version__

@app.template_filter('format_number_locale')
def _jinja2_filter_format_number_locale(value: float) -> str:
"Formats for example 4000.10 to the local locale default of 4,000.10"
# Format the number with two decimal places (locale format string will return 6 decimal)
formatted_value = locale.format_string("%.2f", value, grouping=True)

return formatted_value

# We use the whole watch object from the store/JSON so we can see if there's some related status in terms of a thread
# running or something similar.
@app.template_filter('format_last_checked_time')
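Side note (not part of the diff): the new format_number_locale template filter boils down to locale.format_string() with grouping, after the app tries to apply the system default locale at startup. A rough standalone equivalent:

# Rough standalone equivalent of the filter body above (illustrative).
import locale

locale.setlocale(locale.LC_ALL, '')                            # best-effort: use the system default locale
print(locale.format_string("%.2f", 4000.10, grouping=True))    # e.g. '4,000.10' under an en_US locale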
@@ -449,7 +472,7 @@ def changedetection_app(config=None, datastore_o=None):
continue
if watch.get('last_error'):
errored_count += 1


if search_q:
if (watch.get('title') and search_q in watch.get('title').lower()) or search_q in watch.get('url', '').lower():
sorted_watches.append(watch)
@@ -512,14 +535,32 @@ def changedetection_app(config=None, datastore_o=None):
@login_optionally_required
def ajax_callback_send_notification_test(watch_uuid=None):

# Watch_uuid could be unsuet in the case its used in tag editor, global setings
# Watch_uuid could be unset in the case it`s used in tag editor, global settings
import apprise
import random
from .apprise_asset import asset
apobj = apprise.Apprise(asset=asset)

watch = datastore.data['watching'].get(watch_uuid) if watch_uuid else None
# so that the custom endpoints are registered
from changedetectionio.apprise_plugin import apprise_custom_api_call_wrapper
is_global_settings_form = request.args.get('mode', '') == 'global-settings'
is_group_settings_form = request.args.get('mode', '') == 'group-settings'

notification_urls = request.form['notification_urls'].strip().splitlines()
# Use an existing random one on the global/main settings form
if not watch_uuid and (is_global_settings_form or is_group_settings_form) \
and datastore.data.get('watching'):
logger.debug(f"Send test notification - Choosing random Watch {watch_uuid}")
watch_uuid = random.choice(list(datastore.data['watching'].keys()))

if not watch_uuid:
return make_response("Error: You must have atleast one watch configured for 'test notification' to work", 400)

watch = datastore.data['watching'].get(watch_uuid)

notification_urls = None

if request.form.get('notification_urls'):
notification_urls = request.form['notification_urls'].strip().splitlines()

if not notification_urls:
logger.debug("Test notification - Trying by group/tag in the edit form if available")
@@ -529,8 +570,6 @@ def changedetection_app(config=None, datastore_o=None):
tag = datastore.tag_exists_by_name(k.strip())
notification_urls = tag.get('notifications_urls') if tag and tag.get('notifications_urls') else None

is_global_settings_form = request.args.get('mode', '') == 'global-settings'
is_group_settings_form = request.args.get('mode', '') == 'group-settings'
if not notification_urls and not is_global_settings_form and not is_group_settings_form:
# In the global settings, use only what is typed currently in the text box
logger.debug("Test notification - Trying by global system settings notifications")
@@ -539,17 +578,17 @@ def changedetection_app(config=None, datastore_o=None):


if not notification_urls:
return 'No Notification URLs set/found'
return 'Error: No Notification URLs set/found'

for n_url in notification_urls:
if len(n_url.strip()):
if not apobj.add(n_url):
return f'Error - {n_url} is not a valid AppRise URL.'
return f'Error: {n_url} is not a valid AppRise URL.'

try:
# use the same as when it is triggered, but then override it with the form test values
n_object = {
'watch_url': request.form['window_url'],
'watch_url': request.form.get('window_url', "https://changedetection.io"),
'notification_urls': notification_urls
}

@@ -563,11 +602,13 @@ def changedetection_app(config=None, datastore_o=None):
if 'notification_body' in request.form and request.form['notification_body'].strip():
n_object['notification_body'] = request.form.get('notification_body', '').strip()

n_object.update(watch.extra_notification_token_values())

from . import update_worker
new_worker = update_worker.update_worker(update_q, notification_q, app, datastore)
new_worker.queue_notification_for_watch(notification_q=notification_q, n_object=n_object, watch=watch)
except Exception as e:
return make_response({'error': str(e)}, 400)
return make_response(f"Error: str(e)", 400)

return 'OK - Sent test notifications'
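For context (not from the diff): the test-notification endpoint leans on the Apprise library - a URL is rejected up front when apprise.Apprise().add() returns False. A minimal sketch with an example notification URL:

# Minimal Apprise sketch - the notification URL is an example value only.
import apprise

apobj = apprise.Apprise()
url = 'mailto://user:password@example.com'

if not apobj.add(url):
    print(f'Error: {url} is not a valid AppRise URL.')
else:
    apobj.notify(title='ChangeDetection.io Notification', body='Test notification')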
@@ -616,11 +657,11 @@ def changedetection_app(config=None, datastore_o=None):
@login_optionally_required
# https://stackoverflow.com/questions/42984453/wtforms-populate-form-with-data-if-data-exists
# https://wtforms.readthedocs.io/en/3.0.x/forms/#wtforms.form.Form.populate_obj ?

def edit_page(uuid):
from . import forms
from .blueprint.browser_steps.browser_steps import browser_step_ui_config
from . import processors
import importlib

# More for testing, possible to return the first/only
if not datastore.data['watching'].keys():
@@ -652,14 +693,41 @@ def changedetection_app(config=None, datastore_o=None):
# Radio needs '' not None, or incase that the chosen one no longer exists
if default['proxy'] is None or not any(default['proxy'] in tup for tup in datastore.proxy_list):
default['proxy'] = ''

# proxy_override set to the json/text list of the items
form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
data=default
)

# For the form widget tag uuid lookup
form.tags.datastore = datastore # in _value
# Does it use some custom form? does one exist?
processor_name = datastore.data['watching'][uuid].get('processor', '')
processor_classes = next((tpl for tpl in find_processors() if tpl[1] == processor_name), None)
if not processor_classes:
flash(f"Cannot load the edit form for processor/plugin '{processor_classes[1]}', plugin missing?", 'error')
return redirect(url_for('index'))

parent_module = get_parent_module(processor_classes[0])

try:
# Get the parent of the "processor.py" go up one, get the form (kinda spaghetti but its reusing existing code)
forms_module = importlib.import_module(f"{parent_module.__name__}.forms")
# Access the 'processor_settings_form' class from the 'forms' module
form_class = getattr(forms_module, 'processor_settings_form')
except ModuleNotFoundError as e:
# .forms didnt exist
form_class = forms.processor_text_json_diff_form
except AttributeError as e:
# .forms exists but no useful form
form_class = forms.processor_text_json_diff_form

form = form_class(formdata=request.form if request.method == 'POST' else None,
data=default,
extra_notification_tokens=default.extra_notification_token_values(),
default_system_settings=datastore.data['settings']
)

# For the form widget tag UUID back to "string name" for the field
form.tags.datastore = datastore

# Used by some forms that need to dig deeper
form.datastore = datastore
form.watch = default

for p in datastore.extra_browsers:
form.fetch_backend.choices.append(p)
@@ -679,6 +747,11 @@ def changedetection_app(config=None, datastore_o=None):

if request.method == 'POST' and form.validate():

# If they changed processor, it makes sense to reset it.
if datastore.data['watching'][uuid].get('processor') != form.data.get('processor'):
datastore.data['watching'][uuid].clear_watch()
flash("Reset watch history due to change of processor")

extra_update_obj = {
'consecutive_filter_failures': 0,
'last_error' : False
@@ -720,23 +793,55 @@ def changedetection_app(config=None, datastore_o=None):
datastore.data['watching'][uuid].update(form.data)
datastore.data['watching'][uuid].update(extra_update_obj)

if request.args.get('unpause_on_save'):
flash("Updated watch - unpaused!")
else:
flash("Updated watch.")
if not datastore.data['watching'][uuid].get('tags'):
# Force it to be a list, because form.data['tags'] will be string if nothing found
# And del(form.data['tags'] ) wont work either for some reason
datastore.data['watching'][uuid]['tags'] = []

# Recast it if need be to right data Watch handler
watch_class = get_custom_watch_obj_for_processor(form.data.get('processor'))
datastore.data['watching'][uuid] = watch_class(datastore_path=datastore_o.datastore_path, default=datastore.data['watching'][uuid])
flash("Updated watch - unpaused!" if request.args.get('unpause_on_save') else "Updated watch.")

# Re #286 - We wait for syncing new data to disk in another thread every 60 seconds
# But in the case something is added we should save straight away
datastore.needs_write_urgent = True

# Queue the watch for immediate recheck, with a higher priority
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
# Do not queue on edit if its not within the time range

# @todo maybe it should never queue anyway on edit...
is_in_schedule = True
watch = datastore.data['watching'].get(uuid)

if watch.get('time_between_check_use_default'):
time_schedule_limit = datastore.data['settings']['requests'].get('time_schedule_limit', {})
else:
time_schedule_limit = watch.get('time_schedule_limit')

tz_name = time_schedule_limit.get('timezone')
if not tz_name:
tz_name = datastore.data['settings']['application'].get('timezone', 'UTC')

if time_schedule_limit and time_schedule_limit.get('enabled'):
try:
is_in_schedule = is_within_schedule(time_schedule_limit=time_schedule_limit,
default_tz=tz_name
)
except Exception as e:
logger.error(
f"{uuid} - Recheck scheduler, error handling timezone, check skipped - TZ name '{tz_name}' - {str(e)}")
return False

#############################
if not datastore.data['watching'][uuid].get('paused') and is_in_schedule:
# Queue the watch for immediate recheck, with a higher priority
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))

# Diff page [edit] link should go back to diff page
if request.args.get("next") and request.args.get("next") == 'diff':
return redirect(url_for('diff_history_page', uuid=uuid))

return redirect(url_for('index'))
return redirect(url_for('index', tag=request.args.get("tag",'')))

else:
if request.method == 'POST' and not form.validate():
@@ -753,31 +858,55 @@ def changedetection_app(config=None, datastore_o=None):
jq_support = False

watch = datastore.data['watching'].get(uuid)

system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'

is_html_webdriver = False
if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver' or watch.get('fetch_backend', '').startswith('extra_browser_'):
is_html_webdriver = True

from zoneinfo import available_timezones

# Only works reliably with Playwright
visualselector_enabled = os.getenv('PLAYWRIGHT_DRIVER_URL', False) and is_html_webdriver
template_args = {
'available_processors': processors.available_processors(),
'available_timezones': sorted(available_timezones()),
'browser_steps_config': browser_step_ui_config,
'emailprefix': os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
'extra_notification_token_placeholder_info': datastore.get_unique_notification_token_placeholders_available(),
'extra_processor_config': form.extra_tab_content(),
'extra_title': f" - Edit - {watch.label}",
'form': form,
'has_default_notification_urls': True if len(datastore.data['settings']['application']['notification_urls']) else False,
'has_extra_headers_file': len(datastore.get_all_headers_in_textfile_for_watch(uuid=uuid)) > 0,
'has_special_tag_options': _watch_has_tag_options_set(watch=watch),
'is_html_webdriver': is_html_webdriver,
'jq_support': jq_support,
'playwright_enabled': os.getenv('PLAYWRIGHT_DRIVER_URL', False),
'settings_application': datastore.data['settings']['application'],
'timezone_default_config': datastore.data['settings']['application'].get('timezone'),
'using_global_webdriver_wait': not default['webdriver_delay'],
'uuid': uuid,
'visualselector_enabled': visualselector_enabled,
'watch': watch
}

included_content = None
if form.extra_form_content():
# So that the extra panels can access _helpers.html etc, we set the environment to load from templates/
# And then render the code from the module
from jinja2 import Environment, FileSystemLoader
import importlib.resources
templates_dir = str(importlib.resources.files("changedetectionio").joinpath('templates'))
env = Environment(loader=FileSystemLoader(templates_dir))
template = env.from_string(form.extra_form_content())
included_content = template.render(**template_args)

output = render_template("edit.html",
available_processors=processors.available_processors(),
browser_steps_config=browser_step_ui_config,
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
extra_title=f" - Edit - {watch.label}",
form=form,
has_default_notification_urls=True if len(datastore.data['settings']['application']['notification_urls']) else False,
has_extra_headers_file=len(datastore.get_all_headers_in_textfile_for_watch(uuid=uuid)) > 0,
has_special_tag_options=_watch_has_tag_options_set(watch=watch),
is_html_webdriver=is_html_webdriver,
jq_support=jq_support,
playwright_enabled=os.getenv('PLAYWRIGHT_DRIVER_URL', False),
settings_application=datastore.data['settings']['application'],
using_global_webdriver_wait=not default['webdriver_delay'],
uuid=uuid,
visualselector_enabled=visualselector_enabled,
watch=watch
extra_tab_content=form.extra_tab_content() if form.extra_tab_content() else None,
extra_form_content=included_content,
**template_args
)

return output
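Recap (not part of the diff): the edit page now resolves a per-processor settings form dynamically and falls back to the standard text/JSON diff form when a plugin ships no usable .forms module. A condensed restatement of that lookup, as an illustrative helper rather than project code:

# Condensed restatement of the dynamic form lookup above (illustrative helper).
import importlib
from changedetectionio import forms

def form_class_for(parent_module):
    try:
        forms_module = importlib.import_module(f"{parent_module.__name__}.forms")
        return getattr(forms_module, 'processor_settings_form')
    except (ModuleNotFoundError, AttributeError):
        # No .forms module next to the processor, or no processor_settings_form inside it
        return forms.processor_text_json_diff_form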
@@ -786,6 +915,8 @@ def changedetection_app(config=None, datastore_o=None):
@login_optionally_required
def settings_page():
from changedetectionio import forms
from datetime import datetime
from zoneinfo import available_timezones

default = deepcopy(datastore.data['settings'])
if datastore.proxy_list is not None:
@@ -803,7 +934,8 @@ def changedetection_app(config=None, datastore_o=None):

# Don't use form.data on POST so that it doesnt overrid the checkbox status from the POST status
form = forms.globalSettingsForm(formdata=request.form if request.method == 'POST' else None,
data=default
data=default,
extra_notification_tokens=datastore.get_unique_notification_tokens_available()
)

# Remove the last option 'System default'
@@ -852,13 +984,20 @@ def changedetection_app(config=None, datastore_o=None):
else:
flash("An error occurred, please see below.", "error")

# Convert to ISO 8601 format, all date/time relative events stored as UTC time
utc_time = datetime.now(ZoneInfo("UTC")).isoformat()

output = render_template("settings.html",
api_key=datastore.data['settings']['application'].get('api_access_token'),
available_timezones=sorted(available_timezones()),
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
extra_notification_token_placeholder_info=datastore.get_unique_notification_token_placeholders_available(),
form=form,
hide_remove_pass=os.getenv("SALTED_PASS", False),
min_system_recheck_seconds=int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 3)),
settings_application=datastore.data['settings']['application']
settings_application=datastore.data['settings']['application'],
timezone_default_config=datastore.data['settings']['application'].get('timezone'),
utc_time=utc_time,
)

return output
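Aside (not from the diff): the new utc_time value handed to settings.html is simply an ISO 8601 timestamp pinned to UTC:

# What utc_time contains (illustrative).
from datetime import datetime
from zoneinfo import ZoneInfo

print(datetime.now(ZoneInfo("UTC")).isoformat())   # e.g. 2024-12-01T09:30:12.123456+00:00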
@@ -887,9 +1026,9 @@ def changedetection_app(config=None, datastore_o=None):
if request.values.get('urls') and len(request.values.get('urls').strip()):
# Import and push into the queue for immediate update check
importer = import_url_list()
importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore, processor=request.values.get('processor'))
importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore, processor=request.values.get('processor', 'text_json_diff'))
for uuid in importer.new_uuids:
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))

if len(importer.remaining_data) == 0:
return redirect(url_for('index'))
@@ -902,7 +1041,7 @@ def changedetection_app(config=None, datastore_o=None):
d_importer = import_distill_io_json()
d_importer.run(data=request.values.get('distill-io'), flash=flash, datastore=datastore)
for uuid in d_importer.new_uuids:
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))

# XLSX importer
if request.files and request.files.get('xlsx_file'):
@@ -926,7 +1065,7 @@ def changedetection_app(config=None, datastore_o=None):
w_importer.run(data=file, flash=flash, datastore=datastore)

for uuid in w_importer.new_uuids:
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))

# Could be some remaining, or we could be on GET
form = forms.importForm(formdata=request.form if request.method == 'POST' else None)
@@ -1067,8 +1206,6 @@ def changedetection_app(config=None, datastore_o=None):
@login_optionally_required
def preview_page(uuid):
content = []
ignored_line_numbers = []
trigger_line_numbers = []
versions = []
timestamp = None

@@ -1085,11 +1222,10 @@ def changedetection_app(config=None, datastore_o=None):
system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'
extra_stylesheets = [url_for('static_content', group='styles', filename='diff.css')]


is_html_webdriver = False
if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver' or watch.get('fetch_backend', '').startswith('extra_browser_'):
is_html_webdriver = True

triggered_line_numbers = []
if datastore.data['watching'][uuid].history_n == 0 and (watch.get_error_text() or watch.get_error_snapshot()):
flash("Preview unavailable - No fetch/check completed or triggers not reached", "error")
else:
@@ -1102,31 +1238,12 @@ def changedetection_app(config=None, datastore_o=None):

try:
versions = list(watch.history.keys())
tmp = watch.get_history_snapshot(timestamp).splitlines()
content = watch.get_history_snapshot(timestamp)

# Get what needs to be highlighted
ignore_rules = watch.get('ignore_text', []) + datastore.data['settings']['application']['global_ignore_text']

# .readlines will keep the \n, but we will parse it here again, in the future tidy this up
ignored_line_numbers = html_tools.strip_ignore_text(content="\n".join(tmp),
wordlist=ignore_rules,
mode='line numbers'
)

trigger_line_numbers = html_tools.strip_ignore_text(content="\n".join(tmp),
wordlist=watch['trigger_text'],
mode='line numbers'
)
# Prepare the classes and lines used in the template
i=0
for l in tmp:
classes=[]
i+=1
if i in ignored_line_numbers:
classes.append('ignored')
if i in trigger_line_numbers:
classes.append('triggered')
content.append({'line': l, 'classes': ' '.join(classes)})
triggered_line_numbers = html_tools.strip_ignore_text(content=content,
wordlist=watch['trigger_text'],
mode='line numbers'
)

except Exception as e:
content.append({'line': f"File doesnt exist or unable to read timestamp {timestamp}", 'classes': ''})
@@ -1137,8 +1254,7 @@ def changedetection_app(config=None, datastore_o=None):
history_n=watch.history_n,
extra_stylesheets=extra_stylesheets,
extra_title=f" - Diff - {watch.label} @ {timestamp}",
ignored_line_numbers=ignored_line_numbers,
triggered_line_numbers=trigger_line_numbers,
triggered_line_numbers=triggered_line_numbers,
current_diff_url=watch['url'],
screenshot=watch.get_screenshot(),
watch=watch,
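For context (not part of the diff): the preview page now passes the raw snapshot string straight to html_tools.strip_ignore_text() in 'line numbers' mode instead of rebuilding per-line classes itself. A small sketch of that call, using example inputs only; the API usage mirrors the hunk above:

# Example inputs only; the keyword arguments mirror the hunk above.
from changedetectionio import html_tools

content = "line one\nSOLD OUT\nline three"
triggered_line_numbers = html_tools.strip_ignore_text(content=content,
                                                      wordlist=['SOLD OUT'],
                                                      mode='line numbers')
print(triggered_line_numbers)   # line numbers whose text matched the word list, e.g. [2]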
@@ -1162,78 +1278,6 @@ def changedetection_app(config=None, datastore_o=None):

return output

# We're good but backups are even better!
@app.route("/backup", methods=['GET'])
@login_optionally_required
def get_backup():

import zipfile
from pathlib import Path

# Remove any existing backup file, for now we just keep one file

for previous_backup_filename in Path(datastore_o.datastore_path).rglob('changedetection-backup-*.zip'):
os.unlink(previous_backup_filename)

# create a ZipFile object
timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
backupname = "changedetection-backup-{}.zip".format(timestamp)
backup_filepath = os.path.join(datastore_o.datastore_path, backupname)

with zipfile.ZipFile(backup_filepath, "w",
compression=zipfile.ZIP_DEFLATED,
compresslevel=8) as zipObj:

# Be sure we're written fresh
datastore.sync_to_json()

# Add the index
zipObj.write(os.path.join(datastore_o.datastore_path, "url-watches.json"), arcname="url-watches.json")

# Add the flask app secret
zipObj.write(os.path.join(datastore_o.datastore_path, "secret.txt"), arcname="secret.txt")

# Add any data in the watch data directory.
for uuid, w in datastore.data['watching'].items():
for f in Path(w.watch_data_dir).glob('*'):
zipObj.write(f,
# Use the full path to access the file, but make the file 'relative' in the Zip.
arcname=os.path.join(f.parts[-2], f.parts[-1]),
compress_type=zipfile.ZIP_DEFLATED,
compresslevel=8)

# Create a list file with just the URLs, so it's easier to port somewhere else in the future
list_file = "url-list.txt"
with open(os.path.join(datastore_o.datastore_path, list_file), "w") as f:
for uuid in datastore.data["watching"]:
url = datastore.data["watching"][uuid]["url"]
f.write("{}\r\n".format(url))
list_with_tags_file = "url-list-with-tags.txt"
with open(
os.path.join(datastore_o.datastore_path, list_with_tags_file), "w"
) as f:
for uuid in datastore.data["watching"]:
url = datastore.data["watching"][uuid].get('url')
tag = datastore.data["watching"][uuid].get('tags', {})
f.write("{} {}\r\n".format(url, tag))

# Add it to the Zip
zipObj.write(
os.path.join(datastore_o.datastore_path, list_file),
arcname=list_file,
compress_type=zipfile.ZIP_DEFLATED,
compresslevel=8,
)
zipObj.write(
os.path.join(datastore_o.datastore_path, list_with_tags_file),
arcname=list_with_tags_file,
compress_type=zipfile.ZIP_DEFLATED,
compresslevel=8,
)

# Send_from_directory needs to be the full absolute path
return send_from_directory(os.path.abspath(datastore_o.datastore_path), backupname, as_attachment=True)

@app.route("/static/<string:group>/<string:filename>", methods=['GET'])
def static_content(group, filename):
from flask import make_response
@@ -1266,12 +1310,23 @@ def changedetection_app(config=None, datastore_o=None):

# These files should be in our subdirectory
try:
# set nocache, set content-type
response = make_response(send_from_directory(os.path.join(datastore_o.datastore_path, filename), "elements.json"))
response.headers['Content-type'] = 'application/json'
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
response.headers['Pragma'] = 'no-cache'
response.headers['Expires'] = 0
# set nocache, set content-type,
# `filename` is actually directory UUID of the watch
watch_directory = str(os.path.join(datastore_o.datastore_path, filename))
response = None
if os.path.isfile(os.path.join(watch_directory, "elements.deflate")):
response = make_response(send_from_directory(watch_directory, "elements.deflate"))
response.headers['Content-Type'] = 'application/json'
response.headers['Content-Encoding'] = 'deflate'
else:
logger.error(f'Request elements.deflate at "{watch_directory}" but was notfound.')
abort(404)

if response:
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
response.headers['Pragma'] = 'no-cache'
response.headers['Expires'] = "0"

return response

except FileNotFoundError:
@@ -1283,6 +1338,41 @@ def changedetection_app(config=None, datastore_o=None):
except FileNotFoundError:
abort(404)

@app.route("/edit/<string:uuid>/get-html", methods=['GET'])
@login_optionally_required
def watch_get_latest_html(uuid):
from io import BytesIO
from flask import send_file
import brotli

watch = datastore.data['watching'].get(uuid)
if watch and watch.history.keys() and os.path.isdir(watch.watch_data_dir):
latest_filename = list(watch.history.keys())[-1]
html_fname = os.path.join(watch.watch_data_dir, f"{latest_filename}.html.br")
with open(html_fname, 'rb') as f:
if html_fname.endswith('.br'):
# Read and decompress the Brotli file
decompressed_data = brotli.decompress(f.read())
else:
decompressed_data = f.read()

buffer = BytesIO(decompressed_data)

return send_file(buffer, as_attachment=True, download_name=f"{latest_filename}.html", mimetype='text/html')


# Return a 500 error
abort(500)

# Ajax callback
@app.route("/edit/<string:uuid>/preview-rendered", methods=['POST'])
@login_optionally_required
def watch_get_preview_rendered(uuid):
'''For when viewing the "preview" of the rendered text from inside of Edit'''
from .processors.text_json_diff import prepare_filter_prevew
return prepare_filter_prevew(watch_uuid=uuid, datastore=datastore)


@app.route("/form/add/quickwatch", methods=['POST'])
@login_optionally_required
def form_quick_watch_add():
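Side note (not from the diff): snapshots are stored Brotli-compressed as <timestamp>.html.br, which is why the new get-html endpoint decompresses before streaming. A minimal reader, assuming a watch object exposing the same .history and .watch_data_dir attributes used above:

# Minimal sketch - assumes 'watch' exposes .history and .watch_data_dir as in the endpoint above.
import os
import brotli

def latest_snapshot_html(watch):
    latest_key = list(watch.history.keys())[-1]
    fname = os.path.join(watch.watch_data_dir, f"{latest_key}.html.br")
    with open(fname, 'rb') as f:
        return brotli.decompress(f.read()).decode('utf-8', errors='replace')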
@@ -1297,7 +1387,7 @@ def changedetection_app(config=None, datastore_o=None):
url = request.form.get('url').strip()
if datastore.url_exists(url):
flash(f'Warning, URL {url} already exists', "notice")


add_paused = request.form.get('edit_and_watch_submit_button') != None
processor = request.form.get('processor', 'text_json_diff')
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras={'paused': add_paused, 'processor': processor})
@@ -1305,13 +1395,13 @@ def changedetection_app(config=None, datastore_o=None):
if new_uuid:
if add_paused:
flash('Watch added in Paused state, saving will unpause.')
return redirect(url_for('edit_page', uuid=new_uuid, unpause_on_save=1))
return redirect(url_for('edit_page', uuid=new_uuid, unpause_on_save=1, tag=request.args.get('tag')))
else:
# Straight into the queue.
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid}))
flash("Watch added.")

return redirect(url_for('index'))
return redirect(url_for('index', tag=request.args.get('tag','')))



@@ -1343,7 +1433,7 @@ def changedetection_app(config=None, datastore_o=None):
new_uuid = datastore.clone(uuid)
if new_uuid:
if not datastore.data['watching'].get(uuid).get('paused'):
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=5, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=5, item={'uuid': new_uuid}))
flash('Cloned.')

return redirect(url_for('index'))
@@ -1364,7 +1454,7 @@ def changedetection_app(config=None, datastore_o=None):

if uuid:
if uuid not in running_uuids:
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))
i = 1

elif tag:
@@ -1375,7 +1465,7 @@ def changedetection_app(config=None, datastore_o=None):
continue
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
update_q.put(
queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False})
queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid})
)
i += 1

@@ -1385,10 +1475,9 @@ def changedetection_app(config=None, datastore_o=None):
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
if with_errors and not watch.get('last_error'):
continue
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False}))
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid}))
i += 1

flash("{} watches queued for rechecking.".format(i))
flash(f"{i} watches queued for rechecking.")
return redirect(url_for('index', tag=tag))

@app.route("/form/checkbox-operations", methods=['POST'])
@@ -1444,7 +1533,7 @@ def changedetection_app(config=None, datastore_o=None):
uuid = uuid.strip()
if datastore.data['watching'].get(uuid):
# Recheck and require a full reprocessing
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))
flash("{} watches queued for rechecking".format(len(uuids)))

elif (op == 'clear-errors'):
@@ -1482,9 +1571,13 @@ def changedetection_app(config=None, datastore_o=None):
for uuid in uuids:
uuid = uuid.strip()
if datastore.data['watching'].get(uuid):
# Bug in old versions caused by bad edit page/tag handler
if isinstance(datastore.data['watching'][uuid]['tags'], str):
datastore.data['watching'][uuid]['tags'] = []

datastore.data['watching'][uuid]['tags'].append(tag_uuid)

flash("{} watches assigned tag".format(len(uuids)))
flash(f"{len(uuids)} watches were tagged")

return redirect(url_for('index'))
@@ -1575,13 +1668,15 @@ def changedetection_app(config=None, datastore_o=None):
import changedetectionio.blueprint.check_proxies as check_proxies
app.register_blueprint(check_proxies.construct_blueprint(datastore=datastore), url_prefix='/check_proxy')

import changedetectionio.blueprint.backups as backups
app.register_blueprint(backups.construct_blueprint(datastore), url_prefix='/backups')

# @todo handle ctrl break
ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start()
threading.Thread(target=notification_runner).start()

# Check for new release version, but not when running in test/build or pytest
if not os.getenv("GITHUB_REF", False) and not config.get('disable_checkver') == True:
if not os.getenv("GITHUB_REF", False) and not strtobool(os.getenv('DISABLE_VERSION_CHECK', 'no')):
threading.Thread(target=check_for_new_version).start()

return app

@@ -1665,7 +1760,6 @@ def notification_runner():
def ticker_thread_check_time_launch_checks():
import random
from changedetectionio import update_worker

proxy_last_called_time = {}

recheck_time_minimum_seconds = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 3))
@@ -1699,12 +1793,14 @@ def ticker_thread_check_time_launch_checks():
except RuntimeError as e:
# RuntimeError: dictionary changed size during iteration
time.sleep(0.1)
watch_uuid_list = []
else:
break

# Re #438 - Don't place more watches in the queue to be checked if the queue is already large
while update_q.qsize() >= 2000:
time.sleep(1)
logger.warning(f"Recheck watches queue size limit reached ({MAX_QUEUE_SIZE}), skipping adding more items")
time.sleep(3)


recheck_time_system_seconds = int(datastore.threshold_seconds)
@@ -1721,6 +1817,28 @@ def ticker_thread_check_time_launch_checks():
if watch['paused']:
continue

# @todo - Maybe make this a hook?
# Time schedule limit - Decide between watch or global settings
if watch.get('time_between_check_use_default'):
time_schedule_limit = datastore.data['settings']['requests'].get('time_schedule_limit', {})
logger.trace(f"{uuid} Time scheduler - Using system/global settings")
else:
time_schedule_limit = watch.get('time_schedule_limit')
logger.trace(f"{uuid} Time scheduler - Using watch settings (not global settings)")
tz_name = datastore.data['settings']['application'].get('timezone', 'UTC')

if time_schedule_limit and time_schedule_limit.get('enabled'):
try:
result = is_within_schedule(time_schedule_limit=time_schedule_limit,
default_tz=tz_name
)
if not result:
logger.trace(f"{uuid} Time scheduler - not within schedule skipping.")
continue
except Exception as e:
logger.error(
f"{uuid} - Recheck scheduler, error handling timezone, check skipped - TZ name '{tz_name}' - {str(e)}")
return False
# If they supplied an individual entry minutes to threshold.
threshold = recheck_time_system_seconds if watch.get('time_between_check_use_default') else watch.threshold_seconds()

@@ -1764,7 +1882,7 @@ def ticker_thread_check_time_launch_checks():
f"{now - watch['last_checked']:0.2f}s since last checked")

# Into the queue with you
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=priority, item={'uuid': uuid, 'skip_when_checksum_same': True}))
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=priority, item={'uuid': uuid}))

# Reset for next time
watch.jitter_seconds = 0
|
||||
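Recap (not part of the diff): the ticker thread now gates each recheck on the watch (or global) time_schedule_limit via is_within_schedule(). A condensed sketch of that decision, reusing the names from the hunk above as an illustrative helper rather than project code:

# Condensed restatement (illustrative helper, not project code).
from changedetectionio.time_handler import is_within_schedule

def recheck_allowed_now(watch, settings):
    if watch.get('time_between_check_use_default'):
        time_schedule_limit = settings['requests'].get('time_schedule_limit', {})
    else:
        time_schedule_limit = watch.get('time_schedule_limit')

    if time_schedule_limit and time_schedule_limit.get('enabled'):
        tz_name = settings['application'].get('timezone', 'UTC')
        return is_within_schedule(time_schedule_limit=time_schedule_limit, default_tz=tz_name)
    return True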
@@ -1,10 +1,14 @@
import os
import re
from loguru import logger
from wtforms.widgets.core import TimeInput

from changedetectionio.strtobool import strtobool

from wtforms import (
BooleanField,
Form,
Field,
IntegerField,
RadioField,
SelectField,
@@ -123,6 +127,87 @@ class StringTagUUID(StringField):

return 'error'

class TimeDurationForm(Form):
hours = SelectField(choices=[(f"{i}", f"{i}") for i in range(0, 25)], default="24", validators=[validators.Optional()])
minutes = SelectField(choices=[(f"{i}", f"{i}") for i in range(0, 60)], default="00", validators=[validators.Optional()])

class TimeStringField(Field):
"""
A WTForms field for time inputs (HH:MM) that stores the value as a string.
"""
widget = TimeInput() # Use the built-in time input widget

def _value(self):
"""
Returns the value for rendering in the form.
"""
return self.data if self.data is not None else ""

def process_formdata(self, valuelist):
"""
Processes the raw input from the form and stores it as a string.
"""
if valuelist:
time_str = valuelist[0]
# Simple validation for HH:MM format
if not time_str or len(time_str.split(":")) != 2:
raise ValidationError("Invalid time format. Use HH:MM.")
self.data = time_str


class validateTimeZoneName(object):
"""
Flask wtform validators wont work with basic auth
"""

def __init__(self, message=None):
self.message = message

def __call__(self, form, field):
from zoneinfo import available_timezones
python_timezones = available_timezones()
if field.data and field.data not in python_timezones:
raise ValidationError("Not a valid timezone name")

class ScheduleLimitDaySubForm(Form):
enabled = BooleanField("not set", default=True)
start_time = TimeStringField("Start At", default="00:00", render_kw={"placeholder": "HH:MM"}, validators=[validators.Optional()])
duration = FormField(TimeDurationForm, label="Run duration")

class ScheduleLimitForm(Form):
enabled = BooleanField("Use time scheduler", default=False)
# Because the label for=""" doesnt line up/work with the actual checkbox
monday = FormField(ScheduleLimitDaySubForm, label="")
tuesday = FormField(ScheduleLimitDaySubForm, label="")
wednesday = FormField(ScheduleLimitDaySubForm, label="")
thursday = FormField(ScheduleLimitDaySubForm, label="")
friday = FormField(ScheduleLimitDaySubForm, label="")
saturday = FormField(ScheduleLimitDaySubForm, label="")
sunday = FormField(ScheduleLimitDaySubForm, label="")

timezone = StringField("Optional timezone to run in",
render_kw={"list": "timezones"},
validators=[validateTimeZoneName()]
)
def __init__(
self,
formdata=None,
obj=None,
prefix="",
data=None,
meta=None,
**kwargs,
):
super().__init__(formdata, obj, prefix, data, meta, **kwargs)
self.monday.form.enabled.label.text="Monday"
self.tuesday.form.enabled.label.text = "Tuesday"
self.wednesday.form.enabled.label.text = "Wednesday"
self.thursday.form.enabled.label.text = "Thursday"
self.friday.form.enabled.label.text = "Friday"
self.saturday.form.enabled.label.text = "Saturday"
self.sunday.form.enabled.label.text = "Sunday"


class TimeBetweenCheckForm(Form):
weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
@@ -220,19 +305,21 @@ class ValidateAppRiseServers(object):
def __call__(self, form, field):
import apprise
apobj = apprise.Apprise()

# so that the custom endpoints are registered
from changedetectionio.apprise_plugin import apprise_custom_api_call_wrapper
for server_url in field.data:
if not apobj.add(server_url):
message = field.gettext('\'%s\' is not a valid AppRise URL.' % (server_url))
url = server_url.strip()
if url.startswith("#"):
continue

if not apobj.add(url):
message = field.gettext('\'%s\' is not a valid AppRise URL.' % (url))
raise ValidationError(message)

class ValidateJinja2Template(object):
"""
Validates that a {token} is from a valid set
"""
def __init__(self, message=None):
self.message = message

def __call__(self, form, field):
from changedetectionio import notification

@@ -247,6 +334,10 @@ class ValidateJinja2Template(object):
try:
jinja2_env = ImmutableSandboxedEnvironment(loader=BaseLoader)
jinja2_env.globals.update(notification.valid_tokens)
# Extra validation tokens provided on the form_class(... extra_tokens={}) setup
if hasattr(field, 'extra_notification_tokens'):
jinja2_env.globals.update(field.extra_notification_tokens)

jinja2_env.from_string(joined_data).render()
except TemplateSyntaxError as e:
raise ValidationError(f"This is not a valid Jinja2 template: {e}") from e
@@ -275,6 +366,7 @@ class validateURL(object):
# This should raise a ValidationError() or not
validate_url(field.data)


def validate_url(test_url):
# If hosts that only contain alphanumerics are allowed ("localhost" for example)
try:
@@ -419,15 +511,25 @@ class quickWatchForm(Form):

# Common to a single watch and the global settings
class commonSettingsForm(Form):
from . import processors

notification_urls = StringListField('Notification URL List', validators=[validators.Optional(), ValidateAppRiseServers(), ValidateJinja2Template()])
notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()])
def __init__(self, formdata=None, obj=None, prefix="", data=None, meta=None, **kwargs):
super().__init__(formdata, obj, prefix, data, meta, **kwargs)
self.notification_body.extra_notification_tokens = kwargs.get('extra_notification_tokens', {})
self.notification_title.extra_notification_tokens = kwargs.get('extra_notification_tokens', {})
self.notification_urls.extra_notification_tokens = kwargs.get('extra_notification_tokens', {})

extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False)
fetch_backend = RadioField(u'Fetch Method', choices=content_fetchers.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
notification_body = TextAreaField('Notification Body', default='{{ watch_url }} had a change.', validators=[validators.Optional(), ValidateJinja2Template()])
|
||||
notification_format = SelectField('Notification format', choices=valid_notification_formats.keys())
|
||||
fetch_backend = RadioField(u'Fetch Method', choices=content_fetchers.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
|
||||
extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False)
|
||||
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1,
|
||||
message="Should contain one or more seconds")])
|
||||
notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()])
|
||||
notification_urls = StringListField('Notification URL List', validators=[validators.Optional(), ValidateAppRiseServers(), ValidateJinja2Template()])
|
||||
processor = RadioField( label=u"Processor - What do you want to achieve?", choices=processors.available_processors(), default="text_json_diff")
|
||||
timezone = StringField("Timezone for watch schedule", render_kw={"list": "timezones"}, validators=[validateTimeZoneName()])
|
||||
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, message="Should contain one or more seconds")])
|
||||
|
||||
|
||||
class importForm(Form):
|
||||
from . import processors
|
||||
processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff")
|
||||
@@ -435,7 +537,6 @@ class importForm(Form):
|
||||
xlsx_file = FileField('Upload .xlsx file', validators=[FileAllowed(['xlsx'], 'Must be .xlsx file!')])
|
||||
file_mapping = SelectField('File mapping', [validators.DataRequired()], choices={('wachete', 'Wachete mapping'), ('custom','Custom mapping')})
|
||||
|
||||
|
||||
class SingleBrowserStep(Form):
|
||||
|
||||
operation = SelectField('Operation', [validators.Optional()], choices=browser_step_ui_config.keys())
|
||||
@@ -447,44 +548,46 @@ class SingleBrowserStep(Form):
|
||||
# remove_button = SubmitField('-', render_kw={"type": "button", "class": "pure-button pure-button-primary", 'title': 'Remove'})
|
||||
# add_button = SubmitField('+', render_kw={"type": "button", "class": "pure-button pure-button-primary", 'title': 'Add new step after'})
|
||||
|
||||
class watchForm(commonSettingsForm):
|
||||
class processor_text_json_diff_form(commonSettingsForm):
|
||||
|
||||
url = fields.URLField('URL', validators=[validateURL()])
|
||||
tags = StringTagUUID('Group tag', [validators.Optional()], default='')
|
||||
|
||||
time_between_check = FormField(TimeBetweenCheckForm)
|
||||
|
||||
time_schedule_limit = FormField(ScheduleLimitForm)
|
||||
|
||||
time_between_check_use_default = BooleanField('Use global settings for time between check', default=False)
|
||||
|
||||
include_filters = StringListField('CSS/JSONPath/JQ/XPath Filters', [ValidateCSSJSONXPATHInput()], default='')
|
||||
|
||||
subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)])
|
||||
subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_json=False)])
|
||||
|
||||
extract_text = StringListField('Extract text', [ValidateListRegex()])
|
||||
|
||||
title = StringField('Title', default='')
|
||||
|
||||
ignore_text = StringListField('Ignore text', [ValidateListRegex()])
|
||||
ignore_text = StringListField('Ignore lines containing', [ValidateListRegex()])
|
||||
headers = StringDictKeyValue('Request headers')
|
||||
body = TextAreaField('Request body', [validators.Optional()])
|
||||
method = SelectField('Request method', choices=valid_method, default=default_method)
|
||||
ignore_status_codes = BooleanField('Ignore status codes (process non-2xx status codes as normal)', default=False)
|
||||
check_unique_lines = BooleanField('Only trigger when unique lines appear', default=False)
|
||||
check_unique_lines = BooleanField('Only trigger when unique lines appear in all history', default=False)
|
||||
remove_duplicate_lines = BooleanField('Remove duplicate lines of text', default=False)
|
||||
sort_text_alphabetically = BooleanField('Sort text alphabetically', default=False)
|
||||
trim_text_whitespace = BooleanField('Trim whitespace before and after text', default=False)
|
||||
|
||||
filter_text_added = BooleanField('Added lines', default=True)
|
||||
filter_text_replaced = BooleanField('Replaced/changed lines', default=True)
|
||||
filter_text_removed = BooleanField('Removed lines', default=True)
|
||||
|
||||
# @todo this class could be moved to its own text_json_diff_watchForm and this goes to restock_diff_Watchform perhaps
|
||||
in_stock_only = BooleanField('Only trigger when product goes BACK to in-stock', default=True)
|
||||
|
||||
trigger_text = StringListField('Trigger/wait for text', [validators.Optional(), ValidateListRegex()])
|
||||
if os.getenv("PLAYWRIGHT_DRIVER_URL"):
|
||||
browser_steps = FieldList(FormField(SingleBrowserStep), min_entries=10)
|
||||
text_should_not_be_present = StringListField('Block change-detection while text matches', [validators.Optional(), ValidateListRegex()])
|
||||
webdriver_js_execute_code = TextAreaField('Execute JavaScript before change detection', render_kw={"rows": "5"}, validators=[validators.Optional()])
|
||||
|
||||
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
|
||||
save_button = SubmitField('Save', render_kw={"class": "pure-button button-small pure-button-primary"})
|
||||
|
||||
proxy = RadioField('Proxy')
|
||||
filter_failure_notification_send = BooleanField(
|
||||
@@ -493,10 +596,17 @@ class watchForm(commonSettingsForm):
|
||||
notification_muted = BooleanField('Notifications Muted / Off', default=False)
|
||||
notification_screenshot = BooleanField('Attach screenshot to notification (where possible)', default=False)
|
||||
|
||||
def extra_tab_content(self):
|
||||
return None
|
||||
|
||||
def extra_form_content(self):
|
||||
return None
|
||||
|
||||
def validate(self, **kwargs):
|
||||
if not super().validate():
|
||||
return False
|
||||
|
||||
from changedetectionio.safe_jinja import render as jinja_render
|
||||
result = True
|
||||
|
||||
# Fail form validation when a body is set for a GET
|
||||
@@ -506,13 +616,64 @@ class watchForm(commonSettingsForm):
|
||||
|
||||
# Attempt to validate jinja2 templates in the URL
|
||||
try:
|
||||
from changedetectionio.safe_jinja import render as jinja_render
|
||||
jinja_render(template_str=self.url.data)
|
||||
except Exception as e:
|
||||
self.url.errors.append('Invalid template syntax')
|
||||
except ModuleNotFoundError as e:
|
||||
# incase jinja2_time or others is missing
|
||||
logger.error(e)
|
||||
self.url.errors.append(f'Invalid template syntax configuration: {e}')
|
||||
result = False
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
self.url.errors.append(f'Invalid template syntax: {e}')
|
||||
result = False
|
||||
|
||||
# Attempt to validate jinja2 templates in the body
|
||||
if self.body.data and self.body.data.strip():
|
||||
try:
|
||||
jinja_render(template_str=self.body.data)
|
||||
except ModuleNotFoundError as e:
|
||||
# incase jinja2_time or others is missing
|
||||
logger.error(e)
|
||||
self.body.errors.append(f'Invalid template syntax configuration: {e}')
|
||||
result = False
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
self.body.errors.append(f'Invalid template syntax: {e}')
|
||||
result = False
|
||||
|
||||
# Attempt to validate jinja2 templates in the headers
|
||||
if len(self.headers.data) > 0:
|
||||
try:
|
||||
for header, value in self.headers.data.items():
|
||||
jinja_render(template_str=value)
|
||||
except ModuleNotFoundError as e:
|
||||
# incase jinja2_time or others is missing
|
||||
logger.error(e)
|
||||
self.headers.errors.append(f'Invalid template syntax configuration: {e}')
|
||||
result = False
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
self.headers.errors.append(f'Invalid template syntax in "{header}" header: {e}')
|
||||
result = False
|
||||
|
||||
return result
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
formdata=None,
|
||||
obj=None,
|
||||
prefix="",
|
||||
data=None,
|
||||
meta=None,
|
||||
**kwargs,
|
||||
):
|
||||
super().__init__(formdata, obj, prefix, data, meta, **kwargs)
|
||||
if kwargs and kwargs.get('default_system_settings'):
|
||||
default_tz = kwargs.get('default_system_settings').get('application', {}).get('timezone')
|
||||
if default_tz:
|
||||
self.time_schedule_limit.form.timezone.render_kw['placeholder'] = default_tz
|
||||
|
||||
|
||||
|
||||
class SingleExtraProxy(Form):
|
||||
|
||||
@@ -534,6 +695,7 @@ class DefaultUAInputForm(Form):
|
||||
# datastore.data['settings']['requests']..
|
||||
class globalSettingsRequestForm(Form):
|
||||
time_between_check = FormField(TimeBetweenCheckForm)
|
||||
time_schedule_limit = FormField(ScheduleLimitForm)
|
||||
proxy = RadioField('Proxy')
|
||||
jitter_seconds = IntegerField('Random jitter seconds ± check',
|
||||
render_kw={"style": "width: 5em;"},
|
||||
@@ -562,7 +724,7 @@ class globalSettingsApplicationForm(commonSettingsForm):
|
||||
empty_pages_are_a_change = BooleanField('Treat empty pages as a change?', default=False)
|
||||
fetch_backend = RadioField('Fetch Method', default="html_requests", choices=content_fetchers.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
|
||||
global_ignore_text = StringListField('Ignore Text', [ValidateListRegex()])
|
||||
global_subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)])
|
||||
global_subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_json=False)])
|
||||
ignore_whitespace = BooleanField('Ignore whitespace')
|
||||
password = SaltyPasswordField()
|
||||
pager_size = IntegerField('Pager size',
|
||||
@@ -584,10 +746,15 @@ class globalSettingsForm(Form):
|
||||
# Define these as FormFields/"sub forms", this way it matches the JSON storage
|
||||
# datastore.data['settings']['application']..
|
||||
# datastore.data['settings']['requests']..
|
||||
def __init__(self, formdata=None, obj=None, prefix="", data=None, meta=None, **kwargs):
|
||||
super().__init__(formdata, obj, prefix, data, meta, **kwargs)
|
||||
self.application.notification_body.extra_notification_tokens = kwargs.get('extra_notification_tokens', {})
|
||||
self.application.notification_title.extra_notification_tokens = kwargs.get('extra_notification_tokens', {})
|
||||
self.application.notification_urls.extra_notification_tokens = kwargs.get('extra_notification_tokens', {})
|
||||
|
||||
requests = FormField(globalSettingsRequestForm)
|
||||
application = FormField(globalSettingsApplicationForm)
|
||||
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
|
||||
save_button = SubmitField('Save', render_kw={"class": "pure-button button-small pure-button-primary"})
|
||||
|
||||
|
||||
class extractDataForm(Form):
|
||||
|
||||
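The scheduler additions in the forms above hinge on two small pieces: `TimeStringField`'s HH:MM check and the per-day sub-form that pairs a start time with a duration. A minimal illustrative sketch of that HH:MM rule outside WTForms is shown below; the helper name and the `day_config` sample are invented for illustration and are not part of changedetection.io.

```python
# Illustrative sketch only: mirrors (and slightly tightens) the HH:MM rule used by TimeStringField above.
def is_valid_hhmm(time_str: str) -> bool:
    """Return True when the string looks like HH:MM (the form field itself only checks the split(':') length)."""
    if not time_str or len(time_str.split(":")) != 2:
        return False
    hours, minutes = time_str.split(":")
    return hours.isdigit() and minutes.isdigit() and 0 <= int(hours) <= 23 and 0 <= int(minutes) <= 59

# Example entry in the shape produced by ScheduleLimitDaySubForm + TimeDurationForm
day_config = {"enabled": True, "start_time": "09:30", "duration": {"hours": "8", "minutes": "00"}}
assert is_valid_hhmm(day_config["start_time"])
```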
@@ -1,18 +1,13 @@

from bs4 import BeautifulSoup
from inscriptis import get_text
from jsonpath_ng.ext import parse
from typing import List
from inscriptis.model.config import ParserConfig
from xml.sax.saxutils import escape as xml_escape
from lxml import etree
import json
import re


# HTML added to be sure each result matching a filter (.example) gets converted to a new line by Inscriptis
TEXT_FILTER_LIST_LINE_SUFFIX = "<br>"

TRANSLATE_WHITESPACE_TABLE = str.maketrans('', '', '\r\n\t ')
PERL_STYLE_REGEX = r'^/(.*?)/([a-z]*)?$'

# 'price' , 'lowPrice', 'highPrice' are usually under here
# All of those may or may not appear on different websites - I didnt find a way todo case-insensitive searching here
LD_JSON_PRODUCT_OFFER_SELECTORS = ["json:$..offers", "json:$..Offers"]
@@ -39,6 +34,7 @@ def perl_style_slash_enclosed_regex_to_options(regex):

# Given a CSS Rule, and a blob of HTML, return the blob of HTML that matches
def include_filters(include_filters, html_content, append_pretty_line_formatting=False):
    from bs4 import BeautifulSoup
    soup = BeautifulSoup(html_content, "html.parser")
    html_block = ""
    r = soup.select(include_filters, separator="")
@@ -56,16 +52,67 @@ def include_filters(include_filters, html_content, append_pretty_line_formatting
    return html_block

def subtractive_css_selector(css_selector, html_content):
    from bs4 import BeautifulSoup
    soup = BeautifulSoup(html_content, "html.parser")
    for item in soup.select(css_selector):

    # So that the elements dont shift their index, build a list of elements here which will be pointers to their place in the DOM
    elements_to_remove = soup.select(css_selector)

    # Then, remove them in a separate loop
    for item in elements_to_remove:
        item.decompose()

    return str(soup)

def subtractive_xpath_selector(selectors: List[str], html_content: str) -> str:
    # Parse the HTML content using lxml
    html_tree = etree.HTML(html_content)

    # First, collect all elements to remove
    elements_to_remove = []

    # Iterate over the list of XPath selectors
    for selector in selectors:
        # Collect elements for each selector
        elements_to_remove.extend(html_tree.xpath(selector))

    # Then, remove them in a separate loop
    for element in elements_to_remove:
        if element.getparent() is not None:  # Ensure the element has a parent before removing
            element.getparent().remove(element)

    # Convert the modified HTML tree back to a string
    modified_html = etree.tostring(html_tree, method="html").decode("utf-8")
    return modified_html


def element_removal(selectors: List[str], html_content):
    """Joins individual filters into one css filter."""
    selector = ",".join(selectors)
    return subtractive_css_selector(selector, html_content)
    """Removes elements that match a list of CSS or XPath selectors."""
    modified_html = html_content
    css_selectors = []
    xpath_selectors = []

    for selector in selectors:
        if selector.startswith(('xpath:', 'xpath1:', '//')):
            # Handle XPath selectors separately
            xpath_selector = selector.removeprefix('xpath:').removeprefix('xpath1:')
            xpath_selectors.append(xpath_selector)
        else:
            # Collect CSS selectors as one "hit", see comment in subtractive_css_selector
            css_selectors.append(selector.strip().strip(","))

    if xpath_selectors:
        modified_html = subtractive_xpath_selector(xpath_selectors, modified_html)

    if css_selectors:
        # Remove duplicates, then combine all CSS selectors into one string, separated by commas
        # This stops the elements index shifting
        unique_selectors = list(set(css_selectors))  # Ensure uniqueness
        combined_css_selector = " , ".join(unique_selectors)
        modified_html = subtractive_css_selector(combined_css_selector, modified_html)

    return modified_html

def elementpath_tostring(obj):
    """
@@ -181,6 +228,7 @@ def xpath1_filter(xpath_filter, html_content, append_pretty_line_formatting=Fals

# Extract/find element
def extract_element(find='title', html_content=''):
    from bs4 import BeautifulSoup

    #Re #106, be sure to handle when its not found
    element_text = None
@@ -194,6 +242,8 @@ def extract_element(find='title', html_content=''):

#
def _parse_json(json_data, json_filter):
    from jsonpath_ng.ext import parse

    if json_filter.startswith("json:"):
        jsonpath_expression = parse(json_filter.replace('json:', ''))
        match = jsonpath_expression.find(json_data)
@@ -242,8 +292,10 @@ def _get_stripped_text_from_json_match(match):
# json_filter - ie json:$..price
# ensure_is_ldjson_info_type - str "product", optional, "@type == product" (I dont know how to do that as a json selector)
def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None):
    stripped_text_from_html = False
    from bs4 import BeautifulSoup

    stripped_text_from_html = False
    # https://github.com/dgtlmoon/changedetection.io/pull/2041#issuecomment-1848397161w
    # Try to parse/filter out the JSON, if we get some parser error, then maybe it's embedded within HTML tags
    try:
        stripped_text_from_html = _parse_json(json.loads(content), json_filter)
@@ -282,17 +334,19 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None
            if isinstance(json_data, dict):
                # If it has LD JSON 'key' @type, and @type is 'product', and something was found for the search
                # (Some sites have multiple of the same ld+json @type='product', but some have the review part, some have the 'price' part)
                # @type could also be a list (Product, SubType)
                # @type could also be a list although non-standard ("@type": ["Product", "SubType"],)
                # LD_JSON auto-extract also requires some content PLUS the ldjson to be present
                # 1833 - could be either str or dict, should not be anything else
                if json_data.get('@type') and stripped_text_from_html:
                    try:
                        if json_data.get('@type') == str or json_data.get('@type') == dict:
                            types = [json_data.get('@type')] if isinstance(json_data.get('@type'), str) else json_data.get('@type')
                            if ensure_is_ldjson_info_type.lower() in [x.lower().strip() for x in types]:
                                break
                    except:
                        continue

                t = json_data.get('@type')
                if t and stripped_text_from_html:

                    if isinstance(t, str) and t.lower() == ensure_is_ldjson_info_type.lower():
                        break
                    # The non-standard part, some have a list
                    elif isinstance(t, list):
                        if ensure_is_ldjson_info_type.lower() in [x.lower().strip() for x in t]:
                            break

            elif stripped_text_from_html:
                break
@@ -307,6 +361,7 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None
# - "line numbers" return a list of line numbers that match (int list)
#
# wordlist - list of regex's (str) or words (str)
# Preserves all linefeeds and other whitespacing, its not the job of this to remove that
def strip_ignore_text(content, wordlist, mode="content"):
    i = 0
    output = []
@@ -322,34 +377,33 @@ def strip_ignore_text(content, wordlist, mode="content"):
        else:
            ignore_text.append(k.strip())

    for line in content.splitlines():
    for line in content.splitlines(keepends=True):
        i += 1
        # Always ignore blank lines in this mode. (when this function gets called)
        got_match = False
        if len(line.strip()):
            for l in ignore_text:
                if l.lower() in line.lower():
            for l in ignore_text:
                if l.lower() in line.lower():
                    got_match = True

            if not got_match:
                for r in ignore_regex:
                    if r.search(line):
                        got_match = True

            if not got_match:
                for r in ignore_regex:
                    if r.search(line):
                        got_match = True

        if not got_match:
            # Not ignored
            output.append(line.encode('utf8'))
        else:
            ignored_line_numbers.append(i)

        if not got_match:
            # Not ignored, and should preserve "keepends"
            output.append(line)
        else:
            ignored_line_numbers.append(i)

    # Used for finding out what to highlight
    if mode == "line numbers":
        return ignored_line_numbers

    return "\n".encode('utf8').join(output)
    return ''.join(output)

def cdata_in_document_to_text(html_content: str, render_anchor_tag_content=False) -> str:
    from xml.sax.saxutils import escape as xml_escape
    pattern = '<!\[CDATA\[(\s*(?:.(?<!\]\]>)\s*)*)\]\]>'
    def repl(m):
        text = m.group(1)
@@ -358,6 +412,9 @@ def cdata_in_document_to_text(html_content: str, render_anchor_tag_content=False
    return re.sub(pattern, repl, html_content)

def html_to_text(html_content: str, render_anchor_tag_content=False, is_rss=False) -> str:
    from inscriptis import get_text
    from inscriptis.model.config import ParserConfig

    """Converts html string to a string with just the text. If ignoring
    rendering anchor tag content is enable, anchor tag content are also
    included in the text
@@ -395,22 +452,23 @@ def html_to_text(html_content: str, render_anchor_tag_content=False, is_rss=Fals

# Does LD+JSON exist with a @type=='product' and a .price set anywhere?
def has_ldjson_product_info(content):
    pricing_data = ''

    try:
        if not 'application/ld+json' in content:
            return False

        for filter in LD_JSON_PRODUCT_OFFER_SELECTORS:
            pricing_data += extract_json_as_string(content=content,
                                                   json_filter=filter,
                                                   ensure_is_ldjson_info_type="product")
        lc = content.lower()
        if 'application/ld+json' in lc and lc.count('"price"') == 1 and '"pricecurrency"' in lc:
            return True

        # On some pages this is really terribly expensive when they dont really need it
        # (For example you never want price monitoring, but this runs on every watch to suggest it)
        # for filter in LD_JSON_PRODUCT_OFFER_SELECTORS:
        #     pricing_data += extract_json_as_string(content=content,
        #                                            json_filter=filter,
        #                                            ensure_is_ldjson_info_type="product")
    except Exception as e:
        # Totally fine
        # OK too
        return False
    x=bool(pricing_data)
    return x

    return False


def workarounds_for_obfuscations(content):
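The reworked `element_removal()` earlier in this file now routes each selector by prefix (CSS vs `xpath:`/`xpath1:`/`//`) instead of treating everything as CSS. A small usage sketch, assuming `element_removal` is imported from `changedetectionio.html_tools` and that `bs4` and `lxml` are installed; the sample HTML and selectors are invented for illustration:

```python
# Illustrative usage of element_removal() with mixed CSS and XPath selectors (sample data only)
from changedetectionio.html_tools import element_removal

sample_html = "<html><body><div class='ad'>advert</div><p id='keep'>hello</p><span>tracker</span></body></html>"

# "//span" goes through the XPath pass, ".ad" through the combined CSS pass
cleaned = element_removal(["//span", ".ad"], sample_html)

assert "hello" in cleaned
assert "tracker" not in cleaned
```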
@@ -52,7 +52,8 @@ class model(dict):
            'schema_version' : 0,
            'shared_diff_access': False,
            'webdriver_delay': None , # Extra delay in seconds before extracting text
            'tags': {} #@todo use Tag.model initialisers
            'tags': {}, #@todo use Tag.model initialisers
            'timezone': None, # Default IANA timezone name
        }
    }
}
@@ -1,19 +1,14 @@
from .Watch import base_config
import uuid

class model(dict):
from changedetectionio.model import watch_base


class model(watch_base):

    def __init__(self, *arg, **kw):
        super(model, self).__init__(*arg, **kw)

        self.update(base_config)

        self['uuid'] = str(uuid.uuid4())
        self['overrides_watch'] = kw.get('default', {}).get('overrides_watch')

        if kw.get('default'):
            self.update(kw['default'])
            del kw['default']


        # Goes at the end so we update the default object with the initialiser
        super(model, self).__init__(*arg, **kw)
@@ -1,13 +1,13 @@
from changedetectionio.strtobool import strtobool
from changedetectionio.safe_jinja import render as jinja_render

from . import watch_base
import os
import re
import time
import uuid
from pathlib import Path
from loguru import logger

from ..html_tools import TRANSLATE_WHITESPACE_TABLE

# Allowable protocols, protects against javascript: etc
# file:// is further checked by ALLOW_FILE_URI
SAFE_PROTOCOL_REGEX='^(http|https|ftp|file):'
@@ -15,69 +15,6 @@ SAFE_PROTOCOL_REGEX='^(http|https|ftp|file):'
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 3))
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}

from changedetectionio.notification import (
    default_notification_format_for_watch
)

base_config = {
    'body': None,
    'browser_steps': [],
    'browser_steps_last_error_step': None,
    'check_unique_lines': False, # On change-detected, compare against all history if its something new
    'check_count': 0,
    'date_created': None,
    'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
    'extract_text': [], # Extract text by regex after filters
    'extract_title_as_title': False,
    'fetch_backend': 'system', # plaintext, playwright etc
    'fetch_time': 0.0,
    'processor': 'text_json_diff', # could be restock_diff or others from .processors
    'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
    'filter_text_added': True,
    'filter_text_replaced': True,
    'filter_text_removed': True,
    'has_ldjson_price_data': None,
    'track_ldjson_price_data': None,
    'headers': {}, # Extra headers to send
    'ignore_text': [], # List of text to ignore when calculating the comparison checksum
    'in_stock' : None,
    'in_stock_only' : True, # Only trigger change on going to instock from out-of-stock
    'include_filters': [],
    'last_checked': 0,
    'last_error': False,
    'last_viewed': 0, # history key value of the last viewed via the [diff] link
    'method': 'GET',
    'notification_alert_count': 0,
    # Custom notification content
    'notification_body': None,
    'notification_format': default_notification_format_for_watch,
    'notification_muted': False,
    'notification_title': None,
    'notification_screenshot': False, # Include the latest screenshot if available and supported by the apprise URL
    'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
    'paused': False,
    'previous_md5': False,
    'previous_md5_before_filters': False, # Used for skipping changedetection entirely
    'proxy': None, # Preferred proxy connection
    'remote_server_reply': None, # From 'server' reply header
    'sort_text_alphabetically': False,
    'subtractive_selectors': [],
    'tag': '', # Old system of text name for a tag, to be removed
    'tags': [], # list of UUIDs to App.Tags
    'text_should_not_be_present': [], # Text that should not present
    # Re #110, so then if this is set to None, we know to use the default value instead
    # Requires setting to None on submit if it's the same as the default
    # Should be all None by default, so we use the system default in this case.
    'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
    'time_between_check_use_default': True,
    'title': None,
    'trigger_text': [], # List of text or regex to wait for until a change is detected
    'url': '',
    'uuid': str(uuid.uuid4()),
    'webdriver_delay': None,
    'webdriver_js_execute_code': None, # Run before change-detection
}


def is_safe_url(test_url):
    # See https://github.com/dgtlmoon/changedetection.io/issues/1358
@@ -94,30 +31,27 @@ def is_safe_url(test_url):

    return True

class model(dict):

class model(watch_base):
    __newest_history_key = None
    __history_n = 0
    jitter_seconds = 0

    def __init__(self, *arg, **kw):

        self.update(base_config)
        self.__datastore_path = kw['datastore_path']

        self['uuid'] = str(uuid.uuid4())

        del kw['datastore_path']

        self.__datastore_path = kw.get('datastore_path')
        if kw.get('datastore_path'):
            del kw['datastore_path']
        super(model, self).__init__(*arg, **kw)
        if kw.get('default'):
            self.update(kw['default'])
            del kw['default']

        if self.get('default'):
            del self['default']

        # Be sure the cached timestamp is ready
        bump = self.history

        # Goes at the end so we update the default object with the initialiser
        super(model, self).__init__(*arg, **kw)

    @property
    def viewed(self):
        # Don't return viewed when last_viewed is 0 and newest_key is 0
@@ -155,8 +89,39 @@ class model(dict):

        if ready_url.startswith('source:'):
            ready_url=ready_url.replace('source:', '')

        # Also double check it after any Jinja2 formatting just incase
        if not is_safe_url(ready_url):
            return 'DISABLED'
        return ready_url

    def clear_watch(self):
        import pathlib

        # JSON Data, Screenshots, Textfiles (history index and snapshots), HTML in the future etc
        for item in pathlib.Path(str(self.watch_data_dir)).rglob("*.*"):
            os.unlink(item)

        # Force the attr to recalculate
        bump = self.history

        # Do this last because it will trigger a recheck due to last_checked being zero
        self.update({
            'browser_steps_last_error_step': None,
            'check_count': 0,
            'fetch_time': 0.0,
            'has_ldjson_price_data': None,
            'last_checked': 0,
            'last_error': False,
            'last_notification_error': False,
            'last_viewed': 0,
            'previous_md5': False,
            'previous_md5_before_filters': False,
            'remote_server_reply': None,
            'track_ldjson_price_data': None
        })
        return

    @property
    def is_source_type_url(self):
        return self.get('url', '').startswith('source:')
@@ -213,6 +178,10 @@ class model(dict):
        """
        tmp_history = {}

        # In the case we are only using the watch for processing without history
        if not self.watch_data_dir:
            return []

        # Read the history file as a dict
        fname = os.path.join(self.watch_data_dir, "history.txt")
        if os.path.isfile(fname):
@@ -258,6 +227,13 @@ class model(dict):

        return has_browser_steps

    @property
    def has_restock_info(self):
        if self.get('restock') and self['restock'].get('in_stock') != None:
            return True

        return False

    # Returns the newest key, but if theres only 1 record, then it's counted as not being new, so return 0.
    @property
    def newest_history_key(self):
@@ -342,13 +318,13 @@ class model(dict):
            dest = os.path.join(self.watch_data_dir, snapshot_fname)
            if not os.path.exists(dest):
                with open(dest, 'wb') as f:
                    f.write(brotli.compress(contents, mode=brotli.MODE_TEXT))
                    f.write(brotli.compress(contents.encode('utf-8'), mode=brotli.MODE_TEXT))
        else:
            snapshot_fname = f"{snapshot_id}.txt"
            dest = os.path.join(self.watch_data_dir, snapshot_fname)
            if not os.path.exists(dest):
                with open(dest, 'wb') as f:
                    f.write(contents)
                    f.write(contents.encode('utf-8'))

        # Append to index
        # @todo check last char was \n
@@ -363,7 +339,6 @@ class model(dict):
        # @todo bump static cache of the last timestamp so we dont need to examine the file to set a proper ''viewed'' status
        return snapshot_fname

    @property
    @property
    def has_empty_checktime(self):
        # using all() + dictionary comprehension
@@ -380,14 +355,32 @@ class model(dict):
        return seconds

    # Iterate over all history texts and see if something new exists
    def lines_contain_something_unique_compared_to_history(self, lines: list):
        local_lines = set([l.decode('utf-8').strip().lower() for l in lines])
    # Always applying .strip() to start/end but optionally replace any other whitespace
    def lines_contain_something_unique_compared_to_history(self, lines: list, ignore_whitespace=False):
        local_lines = []
        if lines:
            if ignore_whitespace:
                if isinstance(lines[0], str): # Can be either str or bytes depending on what was on the disk
                    local_lines = set([l.translate(TRANSLATE_WHITESPACE_TABLE).lower() for l in lines])
                else:
                    local_lines = set([l.decode('utf-8').translate(TRANSLATE_WHITESPACE_TABLE).lower() for l in lines])
            else:
                if isinstance(lines[0], str): # Can be either str or bytes depending on what was on the disk
                    local_lines = set([l.strip().lower() for l in lines])
                else:
                    local_lines = set([l.decode('utf-8').strip().lower() for l in lines])


        # Compare each lines (set) against each history text file (set) looking for something new..
        existing_history = set({})
        for k, v in self.history.items():
            content = self.get_history_snapshot(k)
            alist = set([line.strip().lower() for line in content.splitlines()])

            if ignore_whitespace:
                alist = set([line.translate(TRANSLATE_WHITESPACE_TABLE).lower() for line in content.splitlines()])
            else:
                alist = set([line.strip().lower() for line in content.splitlines()])

            existing_history = existing_history.union(alist)

        # Check that everything in local_lines(new stuff) already exists in existing_history - it should
@@ -431,8 +424,8 @@ class model(dict):
    @property
    def watch_data_dir(self):
        # The base dir of the watch data
        return os.path.join(self.__datastore_path, self['uuid'])

        return os.path.join(self.__datastore_path, self['uuid']) if self.__datastore_path else None

    def get_error_text(self):
        """Return the text saved from a previous request that resulted in a non-200 error"""
        fname = os.path.join(self.watch_data_dir, "last-error.txt")
@@ -467,6 +460,17 @@ class model(dict):
    def toggle_mute(self):
        self['notification_muted'] ^= True

    def extra_notification_token_values(self):
        # Used for providing extra tokens
        # return {'widget': 555}
        return {}

    def extra_notification_token_placeholder_info(self):
        # Used for providing extra tokens
        # return [('widget', "Get widget amounts")]
        return []


    def extract_regex_from_all_history(self, regex):
        import csv
        import re
@@ -533,16 +537,17 @@ class model(dict):

    def save_xpath_data(self, data, as_error=False):
        import json
        import zlib

        if as_error:
            target_path = os.path.join(self.watch_data_dir, "elements-error.json")
            target_path = os.path.join(str(self.watch_data_dir), "elements-error.deflate")
        else:
            target_path = os.path.join(self.watch_data_dir, "elements.json")
            target_path = os.path.join(str(self.watch_data_dir), "elements.deflate")

        self.ensure_data_dir_exists()

        with open(target_path, 'w') as f:
            f.write(json.dumps(data))
        with open(target_path, 'wb') as f:
            f.write(zlib.compress(json.dumps(data).encode()))
            f.close()

        # Save as PNG, PNG is larger but better for doing visual diff in the future
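`save_xpath_data()` above now writes the element/xpath data zlib-compressed (`elements.deflate`) instead of plain JSON. A minimal read-back sketch using only the standard library; the file path is a placeholder, not a real path from this repository:

```python
import json
import zlib

# Illustrative only: decode the compressed element data written by save_xpath_data() above.
# Real files live under the watch's data directory, e.g. <datastore>/<watch-uuid>/elements.deflate
with open("datastore/<watch-uuid>/elements.deflate", "rb") as f:  # placeholder path
    xpath_data = json.loads(zlib.decompress(f.read()).decode())
```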
@@ -0,0 +1,135 @@
import os
import uuid

from changedetectionio import strtobool
from changedetectionio.notification import default_notification_format_for_watch

class watch_base(dict):

    def __init__(self, *arg, **kw):
        self.update({
            # Custom notification content
            # Re #110, so then if this is set to None, we know to use the default value instead
            # Requires setting to None on submit if it's the same as the default
            # Should be all None by default, so we use the system default in this case.
            'body': None,
            'browser_steps': [],
            'browser_steps_last_error_step': None,
            'check_count': 0,
            'check_unique_lines': False, # On change-detected, compare against all history if its something new
            'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
            'content-type': None,
            'date_created': None,
            'extract_text': [], # Extract text by regex after filters
            'extract_title_as_title': False,
            'fetch_backend': 'system', # plaintext, playwright etc
            'fetch_time': 0.0,
            'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
            'filter_text_added': True,
            'filter_text_removed': True,
            'filter_text_replaced': True,
            'follow_price_changes': True,
            'has_ldjson_price_data': None,
            'headers': {}, # Extra headers to send
            'ignore_text': [], # List of text to ignore when calculating the comparison checksum
            'in_stock_only': True, # Only trigger change on going to instock from out-of-stock
            'include_filters': [],
            'last_checked': 0,
            'last_error': False,
            'last_viewed': 0, # history key value of the last viewed via the [diff] link
            'method': 'GET',
            'notification_alert_count': 0,
            'notification_body': None,
            'notification_format': default_notification_format_for_watch,
            'notification_muted': False,
            'notification_screenshot': False, # Include the latest screenshot if available and supported by the apprise URL
            'notification_title': None,
            'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
            'paused': False,
            'previous_md5': False,
            'previous_md5_before_filters': False, # Used for skipping changedetection entirely
            'processor': 'text_json_diff', # could be restock_diff or others from .processors
            'price_change_threshold_percent': None,
            'proxy': None, # Preferred proxy connection
            'remote_server_reply': None, # From 'server' reply header
            'sort_text_alphabetically': False,
            'subtractive_selectors': [],
            'tag': '', # Old system of text name for a tag, to be removed
            'tags': [], # list of UUIDs to App.Tags
            'text_should_not_be_present': [], # Text that should not present
            'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
            'time_between_check_use_default': True,
            "time_schedule_limit": {
                "enabled": False,
                "monday": {
                    "enabled": True,
                    "start_time": "00:00",
                    "duration": {
                        "hours": "24",
                        "minutes": "00"
                    }
                },
                "tuesday": {
                    "enabled": True,
                    "start_time": "00:00",
                    "duration": {
                        "hours": "24",
                        "minutes": "00"
                    }
                },
                "wednesday": {
                    "enabled": True,
                    "start_time": "00:00",
                    "duration": {
                        "hours": "24",
                        "minutes": "00"
                    }
                },
                "thursday": {
                    "enabled": True,
                    "start_time": "00:00",
                    "duration": {
                        "hours": "24",
                        "minutes": "00"
                    }
                },
                "friday": {
                    "enabled": True,
                    "start_time": "00:00",
                    "duration": {
                        "hours": "24",
                        "minutes": "00"
                    }
                },
                "saturday": {
                    "enabled": True,
                    "start_time": "00:00",
                    "duration": {
                        "hours": "24",
                        "minutes": "00"
                    }
                },
                "sunday": {
                    "enabled": True,
                    "start_time": "00:00",
                    "duration": {
                        "hours": "24",
                        "minutes": "00"
                    }
                },
            },
            'title': None,
            'track_ldjson_price_data': None,
            'trim_text_whitespace': False,
            'remove_duplicate_lines': False,
            'trigger_text': [], # List of text or regex to wait for until a change is detected
            'url': '',
            'uuid': str(uuid.uuid4()),
            'webdriver_delay': None,
            'webdriver_js_execute_code': None, # Run before change-detection
        })

        super(watch_base, self).__init__(*arg, **kw)

        if self.get('default'):
            del self['default']
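The `time_schedule_limit` default above stores one window per weekday (start time plus duration). The code that consumes this structure is not part of this diff, so the helper below is only an assumption about how such a window could be evaluated; it also ignores windows that cross midnight.

```python
from datetime import datetime, timedelta

# Illustrative sketch only (not changedetection.io code): decide whether "now" falls inside
# one day's window from a time_schedule_limit day entry as defined above.
def day_window_allows(day_config: dict, now: datetime) -> bool:
    if not day_config.get('enabled'):
        return False
    start_h, start_m = map(int, day_config['start_time'].split(':'))
    start = now.replace(hour=start_h, minute=start_m, second=0, microsecond=0)
    duration = timedelta(hours=int(day_config['duration']['hours']),
                         minutes=int(day_config['duration']['minutes']))
    return start <= now < start + duration

# With the default "all day" entry this is True at any time of day
default_day = {"enabled": True, "start_time": "00:00", "duration": {"hours": "24", "minutes": "00"}}
print(day_window_allows(default_day, datetime.now()))
```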
@@ -1,9 +1,10 @@
import apprise

import time
from apprise import NotifyFormat
import json
import apprise
from loguru import logger


valid_tokens = {
    'base_url': '',
    'current_snapshot': '',
@@ -22,7 +23,7 @@ valid_tokens = {
}

default_notification_format_for_watch = 'System default'
default_notification_format = 'Text'
default_notification_format = 'HTML Color'
default_notification_body = '{{watch_url}} had a change.\n---\n{{diff}}\n---\n'
default_notification_title = 'ChangeDetection.io Notification - {{watch_url}}'

@@ -30,90 +31,16 @@ valid_notification_formats = {
    'Text': NotifyFormat.TEXT,
    'Markdown': NotifyFormat.MARKDOWN,
    'HTML': NotifyFormat.HTML,
    'HTML Color': 'htmlcolor',
    # Used only for editing a watch (not for global)
    default_notification_format_for_watch: default_notification_format_for_watch
}

# include the decorator
from apprise.decorators import notify

@notify(on="delete")
@notify(on="deletes")
@notify(on="get")
@notify(on="gets")
@notify(on="post")
@notify(on="posts")
@notify(on="put")
@notify(on="puts")
def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs):
    import requests
    from apprise.utils import parse_url as apprise_parse_url
    from apprise import URLBase

    url = kwargs['meta'].get('url')

    if url.startswith('post'):
        r = requests.post
    elif url.startswith('get'):
        r = requests.get
    elif url.startswith('put'):
        r = requests.put
    elif url.startswith('delete'):
        r = requests.delete

    url = url.replace('post://', 'http://')
    url = url.replace('posts://', 'https://')
    url = url.replace('put://', 'http://')
    url = url.replace('puts://', 'https://')
    url = url.replace('get://', 'http://')
    url = url.replace('gets://', 'https://')
    url = url.replace('put://', 'http://')
    url = url.replace('puts://', 'https://')
    url = url.replace('delete://', 'http://')
    url = url.replace('deletes://', 'https://')

    headers = {}
    params = {}
    auth = None

    # Convert /foobar?+some-header=hello to proper header dictionary
    results = apprise_parse_url(url)
    if results:
        # Add our headers that the user can potentially over-ride if they wish
        # to to our returned result set and tidy entries by unquoting them
        headers = {URLBase.unquote(x): URLBase.unquote(y)
                   for x, y in results['qsd+'].items()}

        # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
        # In Apprise, it relies on prefixing each request arg with "-", because it uses say &method=update as a flag for apprise
        # but here we are making straight requests, so we need todo convert this against apprise's logic
        for k, v in results['qsd'].items():
            if not k.strip('+-') in results['qsd+'].keys():
                params[URLBase.unquote(k)] = URLBase.unquote(v)

        # Determine Authentication
        auth = ''
        if results.get('user') and results.get('password'):
            auth = (URLBase.unquote(results.get('user')), URLBase.unquote(results.get('user')))
        elif results.get('user'):
            auth = (URLBase.unquote(results.get('user')))

    # Try to auto-guess if it's JSON
    try:
        json.loads(body)
        headers['Content-Type'] = 'application/json; charset=utf-8'
    except ValueError as e:
        pass

    r(results.get('url'),
      auth=auth,
      data=body,
      headers=headers,
      params=params
      )


def process_notification(n_object, datastore):
    # so that the custom endpoints are registered
    from changedetectionio.apprise_plugin import apprise_custom_api_call_wrapper

    from .safe_jinja import render as jinja_render
    now = time.time()
@@ -150,14 +77,21 @@ def process_notification(n_object, datastore):

    # Get the notification body from datastore
    n_body = jinja_render(template_str=n_object.get('notification_body', ''), **notification_parameters)
    if n_object.get('notification_format', '').startswith('HTML'):
        n_body = n_body.replace("\n", '<br>')

    n_title = jinja_render(template_str=n_object.get('notification_title', ''), **notification_parameters)

        url = url.strip()
        if url.startswith('#'):
            logger.trace(f"Skipping commented out notification URL - {url}")
            continue

        if not url:
            logger.warning(f"Process Notification: skipping empty notification URL.")
            continue

        logger.info(">> Process Notification: AppRise notifying {}".format(url))
        logger.info(f">> Process Notification: AppRise notifying {url}")
        url = jinja_render(template_str=url, **notification_parameters)

        # Re 323 - Limit discord length to their 2000 char limit total or it wont send.
@@ -230,6 +164,7 @@ def process_notification(n_object, datastore):
        log_value = logs.getvalue()

        if log_value and 'WARNING' in log_value or 'ERROR' in log_value:
            logger.critical(log_value)
            raise Exception(log_value)

    # Return what was sent for better logging - after the for loop
@@ -272,19 +207,18 @@ def create_notification_parameters(n_object, datastore):
    tokens.update(
        {
            'base_url': base_url,
            'current_snapshot': n_object.get('current_snapshot', ''),
            'diff': n_object.get('diff', ''),  # Null default in the case we use a test
            'diff_added': n_object.get('diff_added', ''),  # Null default in the case we use a test
            'diff_full': n_object.get('diff_full', ''),  # Null default in the case we use a test
            'diff_patch': n_object.get('diff_patch', ''),  # Null default in the case we use a test
            'diff_removed': n_object.get('diff_removed', ''),  # Null default in the case we use a test
            'diff_url': diff_url,
            'preview_url': preview_url,
            'triggered_text': n_object.get('triggered_text', ''),
            'watch_tag': watch_tag if watch_tag is not None else '',
            'watch_title': watch_title if watch_title is not None else '',
            'watch_url': watch_url,
            'watch_uuid': uuid,
        })

    # n_object will contain diff, diff_added etc etc
    tokens.update(n_object)

    if uuid:
        tokens.update(datastore.data['watching'].get(uuid).extra_notification_token_values())

    return tokens
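The custom `get://`/`post://`/`put://`/`delete://` notification endpoints shown above (now registered from `changedetectionio.apprise_plugin`) turn query-string arguments into request data: a `+key=value` pair becomes an HTTP header, a plain `key=value` pair becomes a request parameter, and the scheme is rewritten to plain `http://`/`https://` before sending. A hedged illustration with a made-up endpoint:

```python
# Illustrative only - example.com is a placeholder endpoint, not project configuration.
# This mirrors the mapping performed by apprise_custom_api_call_wrapper above.
example_url = "posts://example.com/webhook?+x-api-key=secret123&source=changedetection"

# posts:// is rewritten to https://, so the request becomes roughly:
#   POST https://example.com/webhook?source=changedetection
#   with header  x-api-key: secret123
# and the rendered notification body sent as the request data
# (Content-Type is auto-set to application/json when the body parses as JSON).
```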
@@ -8,4 +8,8 @@ The concept here is to be able to switch between different domain specific probl
Some suggestions for the future

- `graphical`
- `restock_and_price` - extract price AND stock text

## Todo

- Make each processor return a extra list of sub-processed (so you could configure a single processor in different ways)
- move restock_diff to its own pip/github repo
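The README hunk above describes processors as pluggable, domain-specific change detectors. Based on the `difference_detection_processor` base class further down in this diff, a minimal custom processor could look roughly like the sketch below; the module path, `name` placement and class details are illustrative, not an exact template from the project.

```python
# Hypothetical sketch of a processor plugin,
# e.g. changedetectionio/processors/my_processor/processor.py
from changedetectionio.processors import difference_detection_processor
import hashlib

name = 'My example processor'  # assumed to be what the UI lists via available_processors()

class perform_site_check(difference_detection_processor):
    def run_changedetection(self, watch):
        update_obj = {'last_notification_error': False, 'last_error': False}
        text = self.fetcher.content or ''  # filled in by call_browser() in the base class
        new_md5 = hashlib.md5(text.encode('utf-8')).hexdigest()
        changed_detected = watch.get('previous_md5') != new_md5
        update_obj['previous_md5'] = new_md5
        return changed_detected, update_obj, text.encode('utf-8')
```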
@@ -1,10 +1,13 @@
|
||||
from abc import abstractmethod
|
||||
from changedetectionio.content_fetchers.base import Fetcher
|
||||
from changedetectionio.strtobool import strtobool
|
||||
from changedetectionio.model import Watch
|
||||
from copy import deepcopy
|
||||
from loguru import logger
|
||||
import hashlib
|
||||
import importlib
|
||||
import inspect
|
||||
import os
|
||||
import pkgutil
|
||||
import re
|
||||
|
||||
class difference_detection_processor():
|
||||
@@ -15,28 +18,33 @@ class difference_detection_processor():
|
||||
screenshot = None
|
||||
watch = None
|
||||
xpath_data = None
|
||||
preferred_proxy = None
|
||||
|
||||
def __init__(self, *args, datastore, watch_uuid, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.datastore = datastore
|
||||
self.watch = deepcopy(self.datastore.data['watching'].get(watch_uuid))
|
||||
# Generic fetcher that should be extended (requests, playwright etc)
|
||||
self.fetcher = Fetcher()
|
||||
|
||||
def call_browser(self, preferred_proxy_id=None):
|
||||
|
||||
def call_browser(self):
|
||||
from requests.structures import CaseInsensitiveDict
|
||||
# Protect against file:// access
|
||||
if re.search(r'^file://', self.watch.get('url', '').strip(), re.IGNORECASE):
|
||||
|
||||
url = self.watch.link
|
||||
|
||||
# Protect against file://, file:/ access, check the real "link" without any meta "source:" etc prepended.
|
||||
if re.search(r'^file:/', url.strip(), re.IGNORECASE):
|
||||
if not strtobool(os.getenv('ALLOW_FILE_URI', 'false')):
|
||||
raise Exception(
|
||||
"file:// type access is denied for security reasons."
|
||||
)
|
||||
|
||||
url = self.watch.link
|
||||
|
||||
# Requests, playwright, other browser via wss:// etc, fetch_extra_something
|
||||
prefer_fetch_backend = self.watch.get('fetch_backend', 'system')
|
||||
|
||||
# Proxy ID "key"
|
||||
preferred_proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=self.watch.get('uuid'))
preferred_proxy_id = preferred_proxy_id if preferred_proxy_id else self.datastore.get_preferred_proxy_for_watch(uuid=self.watch.get('uuid'))

# Pluggable content self.fetcher
if not prefer_fetch_backend or prefer_fetch_backend == 'system':

@@ -94,6 +102,7 @@ class difference_detection_processor():
self.fetcher.browser_steps_screenshot_path = os.path.join(self.datastore.datastore_path, self.watch.get('uuid'))

# Tweak the base config with the per-watch ones
from changedetectionio.safe_jinja import render as jinja_render
request_headers = CaseInsensitiveDict()

ua = self.datastore.data['settings']['requests'].get('default_ua')

@@ -110,9 +119,15 @@ class difference_detection_processor():
if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
    request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')

for header_name in request_headers:
    request_headers.update({header_name: jinja_render(template_str=request_headers.get(header_name))})

timeout = self.datastore.data['settings']['requests'].get('timeout')

request_body = self.watch.get('body')
if request_body:
    request_body = jinja_render(template_str=self.watch.get('body'))

request_method = self.watch.get('method')
ignore_status_codes = self.watch.get('ignore_status_codes', False)

@@ -130,8 +145,18 @@ class difference_detection_processor():
is_binary = self.watch.is_pdf

# And here we go! call the right browser with browser-specific settings
self.fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, self.watch.get('include_filters'),
                 is_binary=is_binary)
empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)

self.fetcher.run(url=url,
                 timeout=timeout,
                 request_headers=request_headers,
                 request_body=request_body,
                 request_method=request_method,
                 ignore_status_codes=ignore_status_codes,
                 current_include_filters=self.watch.get('include_filters'),
                 is_binary=is_binary,
                 empty_pages_are_a_change=empty_pages_are_a_change
                 )

#@todo .quit here could go on close object, so we can run JS if change-detected
self.fetcher.quit()

@@ -139,7 +164,7 @@ class difference_detection_processor():
# After init, call run_changedetection() which will do the actual change-detection

@abstractmethod
def run_changedetection(self, watch: Watch, skip_when_checksum_same=True):
def run_changedetection(self, watch):
    update_obj = {'last_notification_error': False, 'last_error': False}
    some_data = 'xxxxx'
    update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest()

@@ -147,8 +172,83 @@ class difference_detection_processor():
    return changed_detected, update_obj, ''.encode('utf-8')


def find_sub_packages(package_name):
    """
    Find all sub-packages within the given package.

    :param package_name: The name of the base package to scan for sub-packages.
    :return: A list of sub-package names.
    """
    package = importlib.import_module(package_name)
    return [name for _, name, is_pkg in pkgutil.iter_modules(package.__path__) if is_pkg]


def find_processors():
    """
    Find all subclasses of DifferenceDetectionProcessor in the specified package.

    :param package_name: The name of the package to scan for processor modules.
    :return: A list of (module, class) tuples.
    """
    package_name = "changedetectionio.processors"  # Name of the current package/module

    processors = []
    sub_packages = find_sub_packages(package_name)

    for sub_package in sub_packages:
        module_name = f"{package_name}.{sub_package}.processor"
        try:
            module = importlib.import_module(module_name)

            # Iterate through all classes in the module
            for name, obj in inspect.getmembers(module, inspect.isclass):
                if issubclass(obj, difference_detection_processor) and obj is not difference_detection_processor:
                    processors.append((module, sub_package))
        except (ModuleNotFoundError, ImportError) as e:
            logger.warning(f"Failed to import module {module_name}: {e} (find_processors())")

    return processors


def get_parent_module(module):
    module_name = module.__name__
    if '.' not in module_name:
        return None  # Top-level module has no parent
    parent_module_name = module_name.rsplit('.', 1)[0]
    try:
        return importlib.import_module(parent_module_name)
    except Exception as e:
        pass

    return False


def get_custom_watch_obj_for_processor(processor_name):
    from changedetectionio.model import Watch
    watch_class = Watch.model
    processor_classes = find_processors()
    custom_watch_obj = next((tpl for tpl in processor_classes if tpl[1] == processor_name), None)
    if custom_watch_obj:
        # Parent of .processor.py COULD have its own Watch implementation
        parent_module = get_parent_module(custom_watch_obj[0])
        if hasattr(parent_module, 'Watch'):
            watch_class = parent_module.Watch

    return watch_class


def available_processors():
    from . import restock_diff, text_json_diff
    x=[('text_json_diff', text_json_diff.name), ('restock_diff', restock_diff.name)]
    # @todo Make this smarter with introspection of sorts.
    return x
    """
    Get a list of processors by name and description for the UI elements
    :return: A list :)
    """

    processor_classes = find_processors()

    available = []
    for package, processor_class in processor_classes:
        available.append((processor_class, package.name))

    return available
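For illustration, a minimal sketch (not part of the changeset itself) of how the discovery helpers above can be called once the processors package is importable; the printed values are assumptions:

```python
# Minimal sketch: enumerate processor plugins and resolve their Watch class.
from changedetectionio import processors

for module, sub_package in processors.find_processors():
    # e.g. changedetectionio.processors.restock_diff.processor / restock_diff
    print(module.__name__, sub_package)

# Falls back to the default Watch model when a processor package has no Watch override
watch_class = processors.get_custom_watch_obj_for_processor('restock_diff')
print(watch_class)
```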
changedetectionio/processors/exceptions.py (new file, 10 lines)
@@ -0,0 +1,10 @@
class ProcessorException(Exception):
    def __init__(self, message=None, status_code=None, url=None, screenshot=None, has_filters=False, html_content='', xpath_data=None):
        self.message = message
        self.status_code = status_code
        self.url = url
        self.screenshot = screenshot
        self.has_filters = has_filters
        self.html_content = html_content
        self.xpath_data = xpath_data
        return
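A short hedged sketch of how a processor can raise this exception so the caller receives page context alongside the error; the values are invented for illustration:

```python
from changedetectionio.processors.exceptions import ProcessorException

try:
    raise ProcessorException(message="Could not extract a price",
                             url="https://example.com/product",  # illustrative URL
                             status_code=200,
                             has_filters=True)
except ProcessorException as e:
    # The worker/UI layer can report the failing URL and status together with the message
    print(f"{e.url} ({e.status_code}): {e.message}")
```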
@@ -1,62 +0,0 @@

from . import difference_detection_processor
from loguru import logger
import hashlib
import urllib3

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

name = 'Re-stock detection for single product pages'
description = 'Detects if the product goes back to in-stock'


class UnableToExtractRestockData(Exception):
    def __init__(self, status_code):
        # Set this so we can use it in other parts of the app
        self.status_code = status_code
        return


class perform_site_check(difference_detection_processor):
    screenshot = None
    xpath_data = None

    def run_changedetection(self, watch, skip_when_checksum_same=True):

        if not watch:
            raise Exception("Watch no longer exists.")

        # Unset any existing notification error
        update_obj = {'last_notification_error': False, 'last_error': False}

        self.screenshot = self.fetcher.screenshot
        self.xpath_data = self.fetcher.xpath_data

        # Track the content type
        update_obj['content_type'] = self.fetcher.headers.get('Content-Type', '')
        update_obj["last_check_status"] = self.fetcher.get_last_status_code()

        # Main detection method
        fetched_md5 = None
        if self.fetcher.instock_data:
            fetched_md5 = hashlib.md5(self.fetcher.instock_data.encode('utf-8')).hexdigest()
            # 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold.
            update_obj["in_stock"] = True if self.fetcher.instock_data == 'Possibly in stock' else False
            logger.debug(f"Watch UUID {watch.get('uuid')} restock check returned '{self.fetcher.instock_data}' from JS scraper.")
        else:
            raise UnableToExtractRestockData(status_code=self.fetcher.status_code)

        # The main thing that all this at the moment comes down to :)
        changed_detected = False
        logger.debug(f"Watch UUID {watch.get('uuid')} restock check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")

        if watch.get('previous_md5') and watch.get('previous_md5') != fetched_md5:
            # Yes if we only care about it going to instock, AND we are in stock
            if watch.get('in_stock_only') and update_obj["in_stock"]:
                changed_detected = True

            if not watch.get('in_stock_only'):
                # All cases
                changed_detected = True

        # Always record the new checksum
        update_obj["previous_md5"] = fetched_md5
        return changed_detected, update_obj, self.fetcher.instock_data.encode('utf-8').strip()
changedetectionio/processors/restock_diff/__init__.py (new file, 84 lines)
@@ -0,0 +1,84 @@

from babel.numbers import parse_decimal
from changedetectionio.model.Watch import model as BaseWatch
from typing import Union
import re

class Restock(dict):

    def parse_currency(self, raw_value: str) -> Union[float, None]:
        # Clean and standardize the value (ie 1,400.00 should be 1400.00), even better would be store the whole thing as an integer.
        standardized_value = raw_value

        if ',' in standardized_value and '.' in standardized_value:
            # Identify the correct decimal separator
            if standardized_value.rfind('.') > standardized_value.rfind(','):
                standardized_value = standardized_value.replace(',', '')
            else:
                standardized_value = standardized_value.replace('.', '').replace(',', '.')
        else:
            standardized_value = standardized_value.replace(',', '.')

        # Remove any non-numeric characters except for the decimal point
        standardized_value = re.sub(r'[^\d.-]', '', standardized_value)

        if standardized_value:
            # Convert to float
            return float(parse_decimal(standardized_value, locale='en'))

        return None

    def __init__(self, *args, **kwargs):
        # Define default values
        default_values = {
            'in_stock': None,
            'price': None,
            'currency': None,
            'original_price': None
        }

        # Initialize the dictionary with default values
        super().__init__(default_values)

        # Update with any provided positional arguments (dictionaries)
        if args:
            if len(args) == 1 and isinstance(args[0], dict):
                self.update(args[0])
            else:
                raise ValueError("Only one positional argument of type 'dict' is allowed")

    def __setitem__(self, key, value):
        # Custom logic to handle setting price and original_price
        if key == 'price' or key == 'original_price':
            if isinstance(value, str):
                value = self.parse_currency(raw_value=value)

        super().__setitem__(key, value)

class Watch(BaseWatch):
    def __init__(self, *arg, **kw):
        super().__init__(*arg, **kw)
        self['restock'] = Restock(kw['default']['restock']) if kw.get('default') and kw['default'].get('restock') else Restock()

        self['restock_settings'] = kw['default']['restock_settings'] if kw.get('default',{}).get('restock_settings') else {
            'follow_price_changes': True,
            'in_stock_processing' : 'in_stock_only'
        } #@todo update

    def clear_watch(self):
        super().clear_watch()
        self.update({'restock': Restock()})

    def extra_notification_token_values(self):
        values = super().extra_notification_token_values()
        values['restock'] = self.get('restock', {})
        return values

    def extra_notification_token_placeholder_info(self):
        values = super().extra_notification_token_placeholder_info()

        values.append(('restock.price', "Price detected"))
        values.append(('restock.original_price', "Original price at first check"))

        return values
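A small sketch of the behaviour this class provides: string prices assigned to 'price' or 'original_price' are normalised to floats via parse_currency(); the sample values are invented:

```python
from changedetectionio.processors.restock_diff import Restock

r = Restock()
r['price'] = "1,400.00"           # US-style grouping  -> 1400.0
r['original_price'] = "1.299,95"  # EU-style decimals  -> 1299.95
r['currency'] = 'EUR'             # non-price keys are stored as-is
print(r['price'], r['original_price'], r['currency'])
```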
changedetectionio/processors/restock_diff/forms.py (new file, 81 lines)
@@ -0,0 +1,81 @@
from wtforms import (
    BooleanField,
    validators,
    FloatField
)
from wtforms.fields.choices import RadioField
from wtforms.fields.form import FormField
from wtforms.form import Form

from changedetectionio.forms import processor_text_json_diff_form


class RestockSettingsForm(Form):
    in_stock_processing = RadioField(label='Re-stock detection', choices=[
        ('in_stock_only', "In Stock only (Out Of Stock -> In Stock only)"),
        ('all_changes', "Any availability changes"),
        ('off', "Off, don't follow availability/restock"),
    ], default="in_stock_only")

    price_change_min = FloatField('Below price to trigger notification', [validators.Optional()],
                                  render_kw={"placeholder": "No limit", "size": "10"})
    price_change_max = FloatField('Above price to trigger notification', [validators.Optional()],
                                  render_kw={"placeholder": "No limit", "size": "10"})
    price_change_threshold_percent = FloatField('Threshold in % for price changes since the original price', validators=[
        validators.Optional(),
        validators.NumberRange(min=0, max=100, message="Should be between 0 and 100"),
    ], render_kw={"placeholder": "0%", "size": "5"})

    follow_price_changes = BooleanField('Follow price changes', default=True)

class processor_settings_form(processor_text_json_diff_form):
    restock_settings = FormField(RestockSettingsForm)

    def extra_tab_content(self):
        return 'Restock & Price Detection'

    def extra_form_content(self):
        output = ""

        if getattr(self, 'watch', None) and getattr(self, 'datastore'):
            for tag_uuid in self.watch.get('tags'):
                tag = self.datastore.data['settings']['application']['tags'].get(tag_uuid, {})
                if tag.get('overrides_watch'):
                    # @todo - Quick and dirty, cant access 'url_for' here because its out of scope somehow
                    output = f"""<p><strong>Note! A Group tag overrides the restock and price detection here.</strong></p><style>#restock-fieldset-price-group {{ opacity: 0.6; }}</style>"""

        output += """
        {% from '_helpers.html' import render_field, render_checkbox_field, render_button %}
        <script>
            $(document).ready(function () {
                toggleOpacity('#restock_settings-follow_price_changes', '.price-change-minmax', true);
            });
        </script>

        <fieldset id="restock-fieldset-price-group">
            <div class="pure-control-group">
                <fieldset class="pure-group inline-radio">
                    {{ render_field(form.restock_settings.in_stock_processing) }}
                </fieldset>
                <fieldset class="pure-group">
                    {{ render_checkbox_field(form.restock_settings.follow_price_changes) }}
                    <span class="pure-form-message-inline">Changes in price should trigger a notification</span>
                </fieldset>
                <fieldset class="pure-group price-change-minmax">
                    {{ render_field(form.restock_settings.price_change_min, placeholder=watch.get('restock', {}).get('price')) }}
                    <span class="pure-form-message-inline">Minimum amount, Trigger a change/notification when the price drops <i>below</i> this value.</span>
                </fieldset>
                <fieldset class="pure-group price-change-minmax">
                    {{ render_field(form.restock_settings.price_change_max, placeholder=watch.get('restock', {}).get('price')) }}
                    <span class="pure-form-message-inline">Maximum amount, Trigger a change/notification when the price rises <i>above</i> this value.</span>
                </fieldset>
                <fieldset class="pure-group price-change-minmax">
                    {{ render_field(form.restock_settings.price_change_threshold_percent) }}
                    <span class="pure-form-message-inline">Price must change more than this % to trigger a change since the first check.</span><br>
                    <span class="pure-form-message-inline">For example, If the product is $1,000 USD originally, <strong>2%</strong> would mean it has to change more than $20 since the first check.</span><br>
                </fieldset>
            </div>
        </fieldset>
        """
        return output
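A minimal sketch, assuming only standard WTForms behaviour, of why the inline script above can target '#restock_settings-follow_price_changes': FormField prefixes the nested field names with 'restock_settings-'. The DemoForm container is hypothetical:

```python
from wtforms.form import Form
from wtforms.fields.form import FormField
from changedetectionio.processors.restock_diff.forms import RestockSettingsForm

class DemoForm(Form):  # stand-in container, for illustration only
    restock_settings = FormField(RestockSettingsForm)

f = DemoForm()
print(f.restock_settings.follow_price_changes.name)  # restock_settings-follow_price_changes
```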
changedetectionio/processors/restock_diff/processor.py (new file, 314 lines)
@@ -0,0 +1,314 @@
from .. import difference_detection_processor
from ..exceptions import ProcessorException
from . import Restock
from loguru import logger

import urllib3
import time

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
name = 'Re-stock & Price detection for single product pages'
description = 'Detects if the product goes back to in-stock'

class UnableToExtractRestockData(Exception):
    def __init__(self, status_code):
        # Set this so we can use it in other parts of the app
        self.status_code = status_code
        return

class MoreThanOnePriceFound(Exception):
    def __init__(self):
        return

def _search_prop_by_value(matches, value):
    for properties in matches:
        for prop in properties:
            if value in prop[0]:
                return prop[1]  # Yield the desired value and exit the function

def _deduplicate_prices(data):
    import re

    '''
    Some price data has multiple entries, OR it has a single entry with ['$159', '159', 159, "$ 159"] or just "159"
    Get all the values, clean it and add it to a set then return the unique values
    '''
    unique_data = set()

    # Return the complete 'datum' where its price was not seen before
    for datum in data:

        if isinstance(datum.value, list):
            # Process each item in the list
            normalized_value = set([float(re.sub(r'[^\d.]', '', str(item))) for item in datum.value if str(item).strip()])
            unique_data.update(normalized_value)
        else:
            # Process single value
            v = float(re.sub(r'[^\d.]', '', str(datum.value)))
            unique_data.add(v)

    return list(unique_data)


# should return Restock()
# add casting?
def get_itemprop_availability(html_content) -> Restock:
    """
    Kind of funny/cool way to find price/availability in one many different possibilities.
    Use 'extruct' to find any possible RDFa/microdata/json-ld data, make a JSON string from the output then search it.
    """
    from jsonpath_ng import parse

    import re
    now = time.time()
    import extruct
    logger.trace(f"Imported extruct module in {time.time() - now:.3f}s")

    now = time.time()

    # Extruct is very slow, I'm wondering if some ML is going to be faster (800ms on my i7), 'rdfa' seems to be the heaviest.
    syntaxes = ['dublincore', 'json-ld', 'microdata', 'microformat', 'opengraph']
    try:
        data = extruct.extract(html_content, syntaxes=syntaxes)
    except Exception as e:
        logger.warning(f"Unable to extract data, document parsing with extruct failed with {type(e).__name__} - {str(e)}")
        return Restock()

    logger.trace(f"Extruct basic extract of all metadata done in {time.time() - now:.3f}s")

    # First phase, dead simple scanning of anything that looks useful
    value = Restock()
    if data:
        logger.debug(f"Using jsonpath to find price/availability/etc")
        price_parse = parse('$..(price|Price)')
        pricecurrency_parse = parse('$..(pricecurrency|currency|priceCurrency )')
        availability_parse = parse('$..(availability|Availability)')

        price_result = _deduplicate_prices(price_parse.find(data))
        if price_result:
            # Right now, we just support single product items, maybe we will store the whole actual metadata seperately in teh future and
            # parse that for the UI?
            if len(price_result) > 1 and len(price_result) > 1:
                # See of all prices are different, in the case that one product has many embedded data types with the same price
                # One might have $121.95 and another 121.95 etc
                logger.warning(f"More than one price found {price_result}, throwing exception, cant use this plugin.")
                raise MoreThanOnePriceFound()

            value['price'] = price_result[0]

        pricecurrency_result = pricecurrency_parse.find(data)
        if pricecurrency_result:
            value['currency'] = pricecurrency_result[0].value

        availability_result = availability_parse.find(data)
        if availability_result:
            value['availability'] = availability_result[0].value

        if value.get('availability'):
            value['availability'] = re.sub(r'(?i)^(https|http)://schema.org/', '',
                                           value.get('availability').strip(' "\'').lower()) if value.get('availability') else None

        # Second, go dig OpenGraph which is something that jsonpath_ng cant do because of the tuples and double-dots (:)
        if not value.get('price') or value.get('availability'):
            logger.debug(f"Alternatively digging through OpenGraph properties for restock/price info..")
            jsonpath_expr = parse('$..properties')

            for match in jsonpath_expr.find(data):
                if not value.get('price'):
                    value['price'] = _search_prop_by_value([match.value], "price:amount")
                if not value.get('availability'):
                    value['availability'] = _search_prop_by_value([match.value], "product:availability")
                if not value.get('currency'):
                    value['currency'] = _search_prop_by_value([match.value], "price:currency")
    logger.trace(f"Processed with Extruct in {time.time()-now:.3f}s")

    return value


def is_between(number, lower=None, upper=None):
    """
    Check if a number is between two values.

    Parameters:
    number (float): The number to check.
    lower (float or None): The lower bound (inclusive). If None, no lower bound.
    upper (float or None): The upper bound (inclusive). If None, no upper bound.

    Returns:
    bool: True if the number is between the lower and upper bounds, False otherwise.
    """
    return (lower is None or lower <= number) and (upper is None or number <= upper)


class perform_site_check(difference_detection_processor):
    screenshot = None
    xpath_data = None

    def run_changedetection(self, watch):
        import hashlib

        if not watch:
            raise Exception("Watch no longer exists.")

        # Unset any existing notification error
        update_obj = {'last_notification_error': False, 'last_error': False, 'restock': Restock()}

        self.screenshot = self.fetcher.screenshot
        self.xpath_data = self.fetcher.xpath_data

        # Track the content type
        update_obj['content_type'] = self.fetcher.headers.get('Content-Type', '')
        update_obj["last_check_status"] = self.fetcher.get_last_status_code()

        # Only try to process restock information (like scraping for keywords) if the page was actually rendered correctly.
        # Otherwise it will assume "in stock" because nothing suggesting the opposite was found
        from ...html_tools import html_to_text
        text = html_to_text(self.fetcher.content)
        logger.debug(f"Length of text after conversion: {len(text)}")
        if not len(text):
            from ...content_fetchers.exceptions import ReplyWithContentButNoText
            raise ReplyWithContentButNoText(url=watch.link,
                                            status_code=self.fetcher.get_last_status_code(),
                                            screenshot=self.fetcher.screenshot,
                                            html_content=self.fetcher.content,
                                            xpath_data=self.fetcher.xpath_data
                                            )

        # Which restock settings to compare against?
        restock_settings = watch.get('restock_settings', {})

        # See if any tags have 'activate for individual watches in this tag/group?' enabled and use the first we find
        for tag_uuid in watch.get('tags'):
            tag = self.datastore.data['settings']['application']['tags'].get(tag_uuid, {})
            if tag.get('overrides_watch'):
                restock_settings = tag.get('restock_settings', {})
                logger.info(f"Watch {watch.get('uuid')} - Tag '{tag.get('title')}' selected for restock settings override")
                break


        itemprop_availability = {}
        try:
            itemprop_availability = get_itemprop_availability(self.fetcher.content)
        except MoreThanOnePriceFound as e:
            # Add the real data
            raise ProcessorException(message="Cannot run, more than one price detected, this plugin is only for product pages with ONE product, try the content-change detection mode.",
                                     url=watch.get('url'),
                                     status_code=self.fetcher.get_last_status_code(),
                                     screenshot=self.fetcher.screenshot,
                                     xpath_data=self.fetcher.xpath_data
                                     )

        # Something valid in get_itemprop_availability() by scraping metadata ?
        if itemprop_availability.get('price') or itemprop_availability.get('availability'):
            # Store for other usage
            update_obj['restock'] = itemprop_availability

            if itemprop_availability.get('availability'):
                # @todo: Configurable?
                if any(substring.lower() in itemprop_availability['availability'].lower() for substring in [
                    'instock',
                    'instoreonly',
                    'limitedavailability',
                    'onlineonly',
                    'presale']
                       ):
                    update_obj['restock']['in_stock'] = True
                else:
                    update_obj['restock']['in_stock'] = False

        # Main detection method
        fetched_md5 = None

        # store original price if not set
        if itemprop_availability and itemprop_availability.get('price') and not itemprop_availability.get('original_price'):
            itemprop_availability['original_price'] = itemprop_availability.get('price')
            update_obj['restock']["original_price"] = itemprop_availability.get('price')

        if not self.fetcher.instock_data and not itemprop_availability.get('availability') and not itemprop_availability.get('price'):
            raise ProcessorException(
                message=f"Unable to extract restock data for this page unfortunately. (Got code {self.fetcher.get_last_status_code()} from server), no embedded stock information was found and nothing interesting in the text, try using this watch with Chrome.",
                url=watch.get('url'),
                status_code=self.fetcher.get_last_status_code(),
                screenshot=self.fetcher.screenshot,
                xpath_data=self.fetcher.xpath_data
                )

        logger.debug(f"self.fetcher.instock_data is - '{self.fetcher.instock_data}' and itemprop_availability.get('availability') is {itemprop_availability.get('availability')}")
        # Nothing automatic in microdata found, revert to scraping the page
        if self.fetcher.instock_data and itemprop_availability.get('availability') is None:
            # 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold.
            # Careful! this does not really come from chrome/js when the watch is set to plaintext
            update_obj['restock']["in_stock"] = True if self.fetcher.instock_data == 'Possibly in stock' else False
            logger.debug(f"Watch UUID {watch.get('uuid')} restock check returned instock_data - '{self.fetcher.instock_data}' from JS scraper.")

        # Very often websites will lie about the 'availability' in the metadata, so if the scraped version says its NOT in stock, use that.
        if self.fetcher.instock_data and self.fetcher.instock_data != 'Possibly in stock':
            if update_obj['restock'].get('in_stock'):
                logger.warning(
                    f"Lie detected in the availability machine data!! when scraping said its not in stock!! itemprop was '{itemprop_availability}' and scraped from browser was '{self.fetcher.instock_data}' update obj was {update_obj['restock']} ")
                logger.warning(f"Setting instock to FALSE, scraper found '{self.fetcher.instock_data}' in the body but metadata reported not-in-stock")
                update_obj['restock']["in_stock"] = False

        # What we store in the snapshot
        price = update_obj.get('restock').get('price') if update_obj.get('restock').get('price') else ""
        snapshot_content = f"In Stock: {update_obj.get('restock').get('in_stock')} - Price: {price}"

        # Main detection method
        fetched_md5 = hashlib.md5(snapshot_content.encode('utf-8')).hexdigest()

        # The main thing that all this at the moment comes down to :)
        changed_detected = False
        logger.debug(f"Watch UUID {watch.get('uuid')} restock check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")

        # out of stock -> back in stock only?
        if watch.get('restock') and watch['restock'].get('in_stock') != update_obj['restock'].get('in_stock'):
            # Yes if we only care about it going to instock, AND we are in stock
            if restock_settings.get('in_stock_processing') == 'in_stock_only' and update_obj['restock']['in_stock']:
                changed_detected = True

            if restock_settings.get('in_stock_processing') == 'all_changes':
                # All cases
                changed_detected = True

        if restock_settings.get('follow_price_changes') and watch.get('restock') and update_obj.get('restock') and update_obj['restock'].get('price'):
            price = float(update_obj['restock'].get('price'))
            # Default to current price if no previous price found
            if watch['restock'].get('original_price'):
                previous_price = float(watch['restock'].get('original_price'))
                # It was different, but negate it further down
                if price != previous_price:
                    changed_detected = True

            # Minimum/maximum price limit
            if update_obj.get('restock') and update_obj['restock'].get('price'):
                logger.debug(
                    f"{watch.get('uuid')} - Change was detected, 'price_change_max' is '{restock_settings.get('price_change_max', '')}' 'price_change_min' is '{restock_settings.get('price_change_min', '')}', price from website is '{update_obj['restock'].get('price', '')}'.")
                if update_obj['restock'].get('price'):
                    min_limit = float(restock_settings.get('price_change_min')) if restock_settings.get('price_change_min') else None
                    max_limit = float(restock_settings.get('price_change_max')) if restock_settings.get('price_change_max') else None

                    price = float(update_obj['restock'].get('price'))
                    logger.debug(f"{watch.get('uuid')} after float conversion - Min limit: '{min_limit}' Max limit: '{max_limit}' Price: '{price}'")
                    if min_limit or max_limit:
                        if is_between(number=price, lower=min_limit, upper=max_limit):
                            # Price was between min/max limit, so there was nothing todo in any case
                            logger.trace(f"{watch.get('uuid')} {price} is between {min_limit} and {max_limit}, nothing to check, forcing changed_detected = False (was {changed_detected})")
                            changed_detected = False
                        else:
                            logger.trace(f"{watch.get('uuid')} {price} is between {min_limit} and {max_limit}, continuing normal comparison")

                    # Price comparison by %
                    if watch['restock'].get('original_price') and changed_detected and restock_settings.get('price_change_threshold_percent'):
                        previous_price = float(watch['restock'].get('original_price'))
                        pc = float(restock_settings.get('price_change_threshold_percent'))
                        change = abs((price - previous_price) / previous_price * 100)
                        if change and change <= pc:
                            logger.debug(f"{watch.get('uuid')} Override change-detected to FALSE because % threshold ({pc}%) was {change:.3f}%")
                            changed_detected = False
                        else:
                            logger.debug(f"{watch.get('uuid')} Price change was {change:.3f}% , (threshold {pc}%)")

        # Always record the new checksum
        update_obj["previous_md5"] = fetched_md5

        return changed_detected, update_obj, snapshot_content.strip()
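To make the price rules above concrete, here is a standalone sketch of the same arithmetic (it re-implements is_between and the percent threshold check rather than calling the processor; all numbers are invented):

```python
def is_between(number, lower=None, upper=None):
    return (lower is None or lower <= number) and (upper is None or number <= upper)

original_price = 1000.0   # recorded at the first check
price = 1015.0            # scraped on this check
threshold_percent = 2.0   # price_change_threshold_percent

changed = price != original_price                   # True, the price moved
if is_between(price, lower=None, upper=1200.0):     # inside the min/max window -> suppressed
    changed = False

change_pct = abs((price - original_price) / original_price * 100)   # 1.5%
if change_pct <= threshold_percent:                 # under the 2% (i.e. $20) threshold -> suppressed
    changed = False

print(changed)  # False
```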
changedetectionio/processors/text_json_diff/__init__.py (new file, 115 lines)
@@ -0,0 +1,115 @@

from loguru import logger


def _task(watch, update_handler):
    from changedetectionio.content_fetchers.exceptions import ReplyWithContentButNoText
    from changedetectionio.processors.text_json_diff.processor import FilterNotFoundInResponse

    text_after_filter = ''

    try:
        # The slow process (we run 2 of these in parallel)
        changed_detected, update_obj, text_after_filter = update_handler.run_changedetection(watch=watch)
    except FilterNotFoundInResponse as e:
        text_after_filter = f"Filter not found in HTML: {str(e)}"
    except ReplyWithContentButNoText as e:
        text_after_filter = f"Filter found but no text (empty result)"
    except Exception as e:
        text_after_filter = f"Error: {str(e)}"

    if not text_after_filter.strip():
        text_after_filter = 'Empty content'

    # because run_changedetection always returns bytes due to saving the snapshots etc
    text_after_filter = text_after_filter.decode('utf-8') if isinstance(text_after_filter, bytes) else text_after_filter

    return text_after_filter


def prepare_filter_prevew(datastore, watch_uuid):
    '''Used by @app.route("/edit/<string:uuid>/preview-rendered", methods=['POST'])'''
    from changedetectionio import forms, html_tools
    from changedetectionio.model.Watch import model as watch_model
    from concurrent.futures import ProcessPoolExecutor
    from copy import deepcopy
    from flask import request, jsonify
    import brotli
    import importlib
    import os
    import time
    now = time.time()

    text_after_filter = ''
    text_before_filter = ''
    trigger_line_numbers = []
    ignore_line_numbers = []

    tmp_watch = deepcopy(datastore.data['watching'].get(watch_uuid))

    if tmp_watch and tmp_watch.history and os.path.isdir(tmp_watch.watch_data_dir):
        # Splice in the temporary stuff from the form
        form = forms.processor_text_json_diff_form(formdata=request.form if request.method == 'POST' else None,
                                                   data=request.form
                                                   )

        # Only update vars that came in via the AJAX post
        p = {k: v for k, v in form.data.items() if k in request.form.keys()}
        tmp_watch.update(p)
        blank_watch_no_filters = watch_model()
        blank_watch_no_filters['url'] = tmp_watch.get('url')

        latest_filename = next(reversed(tmp_watch.history))
        html_fname = os.path.join(tmp_watch.watch_data_dir, f"{latest_filename}.html.br")
        with open(html_fname, 'rb') as f:
            decompressed_data = brotli.decompress(f.read()).decode('utf-8') if html_fname.endswith('.br') else f.read().decode('utf-8')

            # Just like a normal change detection except provide a fake "watch" object and dont call .call_browser()
            processor_module = importlib.import_module("changedetectionio.processors.text_json_diff.processor")
            update_handler = processor_module.perform_site_check(datastore=datastore,
                                                                 watch_uuid=tmp_watch.get('uuid')  # probably not needed anymore anyway?
                                                                 )
            # Use the last loaded HTML as the input
            update_handler.datastore = datastore
            update_handler.fetcher.content = str(decompressed_data)  # str() because playwright/puppeteer/requests return string
            update_handler.fetcher.headers['content-type'] = tmp_watch.get('content-type')

            # Process our watch with filters and the HTML from disk, and also a blank watch with no filters but also with the same HTML from disk
            # Do this as a parallel process because it could take some time
            with ProcessPoolExecutor(max_workers=2) as executor:
                future1 = executor.submit(_task, tmp_watch, update_handler)
                future2 = executor.submit(_task, blank_watch_no_filters, update_handler)

                text_after_filter = future1.result()
                text_before_filter = future2.result()

        try:
            trigger_line_numbers = html_tools.strip_ignore_text(content=text_after_filter,
                                                                wordlist=tmp_watch['trigger_text'],
                                                                mode='line numbers'
                                                                )
        except Exception as e:
            text_before_filter = f"Error: {str(e)}"

        try:
            text_to_ignore = tmp_watch.get('ignore_text', []) + datastore.data['settings']['application'].get('global_ignore_text', [])
            ignore_line_numbers = html_tools.strip_ignore_text(content=text_after_filter,
                                                               wordlist=text_to_ignore,
                                                               mode='line numbers'
                                                               )
        except Exception as e:
            text_before_filter = f"Error: {str(e)}"

    logger.trace(f"Parsed in {time.time() - now:.3f}s")

    return jsonify(
        {
            'after_filter': text_after_filter,
            'before_filter': text_before_filter.decode('utf-8') if isinstance(text_before_filter, bytes) else text_before_filter,
            'duration': time.time() - now,
            'trigger_line_numbers': trigger_line_numbers,
            'ignore_line_numbers': ignore_line_numbers,
        }
    )
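A hedged sketch of exercising the preview route named in the docstring above; the host, port and posted field names are assumptions, and an instance with login enabled would also need its session cookie:

```python
import requests

uuid = "00000000-0000-0000-0000-000000000000"   # placeholder watch UUID
resp = requests.post(f"http://localhost:5000/edit/{uuid}/preview-rendered",
                     data={"include_filters": "#price", "trigger_text": "In stock"})
payload = resp.json()
print(payload["after_filter"])                   # text with the watch's filters applied
print(payload["trigger_line_numbers"], payload["ignore_line_numbers"])
```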
@@ -6,8 +6,8 @@ import os
import re
import urllib3

from . import difference_detection_processor
from ..html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text
from changedetectionio.processors import difference_detection_processor
from changedetectionio.html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text, TRANSLATE_WHITESPACE_TABLE
from changedetectionio import html_tools, content_fetchers
from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
from loguru import logger

@@ -16,6 +16,7 @@ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

name = 'Webpage Text/HTML, JSON and PDF changes'
description = 'Detects all text changes where possible'

json_filter_prefixes = ['json:', 'jq:', 'jqraw:']

class FilterNotFoundInResponse(ValueError):

@@ -34,7 +35,7 @@ class PDFToHTMLToolNotFound(ValueError):
# (set_proxy_from_list)
class perform_site_check(difference_detection_processor):

def run_changedetection(self, watch, skip_when_checksum_same=True):
def run_changedetection(self, watch):
changed_detected = False
html_content = ""
screenshot = False  # as bytes

@@ -57,9 +58,6 @@ class perform_site_check(difference_detection_processor):
# Watches added automatically in the queue manager will skip if its the same checksum as the previous run
# Saves a lot of CPU
update_obj['previous_md5_before_filters'] = hashlib.md5(self.fetcher.content.encode('utf-8')).hexdigest()
if skip_when_checksum_same:
if update_obj['previous_md5_before_filters'] == watch.get('previous_md5_before_filters'):
raise content_fetchers.exceptions.checksumFromPreviousCheckWasTheSame()

# Fetching complete, now filters

@@ -174,13 +172,13 @@ class perform_site_check(difference_detection_processor):
html_content=self.fetcher.content,
append_pretty_line_formatting=not watch.is_source_type_url,
is_rss=is_rss)

elif filter_rule.startswith('xpath1:'):
html_content += html_tools.xpath1_filter(xpath_filter=filter_rule.replace('xpath1:', ''),
html_content=self.fetcher.content,
append_pretty_line_formatting=not watch.is_source_type_url,
is_rss=is_rss)
html_content=self.fetcher.content,
append_pretty_line_formatting=not watch.is_source_type_url,
is_rss=is_rss)
else:
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
html_content += html_tools.include_filters(include_filters=filter_rule,
html_content=self.fetcher.content,
append_pretty_line_formatting=not watch.is_source_type_url)

@@ -196,28 +194,24 @@ class perform_site_check(difference_detection_processor):
else:
# extract text
do_anchor = self.datastore.data["settings"]["application"].get("render_anchor_tag_content", False)
stripped_text_from_html = \
html_tools.html_to_text(
html_content=html_content,
render_anchor_tag_content=do_anchor,
is_rss=is_rss  # #1874 activate the <title workaround hack
)
stripped_text_from_html = html_tools.html_to_text(html_content=html_content,
render_anchor_tag_content=do_anchor,
is_rss=is_rss)  # 1874 activate the <title workaround hack

if watch.get('sort_text_alphabetically') and stripped_text_from_html:
# Note: Because a <p>something</p> will add an extra line feed to signify the paragraph gap
# we end up with 'Some text\n\n', sorting will add all those extra \n at the start, so we remove them here.
stripped_text_from_html = stripped_text_from_html.replace('\n\n', '\n')
stripped_text_from_html = '\n'.join( sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower() ))
if watch.get('trim_text_whitespace'):
stripped_text_from_html = '\n'.join(line.strip() for line in stripped_text_from_html.replace("\n\n", "\n").splitlines())

# Re #340 - return the content before the 'ignore text' was applied
text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')
# Also used to calculate/show what was removed
text_content_before_ignored_filter = stripped_text_from_html

# @todo whitespace coming from missing rtrim()?
# stripped_text_from_html could be based on their preferences, replace the processed text with only that which they want to know about.
# Rewrite's the processing text based on only what diff result they want to see

if watch.has_special_diff_filter_options_set() and len(watch.history.keys()):
# Now the content comes from the diff-parser and not the returned HTTP traffic, so could be some differences
from .. import diff
from changedetectionio import diff
# needs to not include (added) etc or it may get used twice
# Replace the processed text with the preferred result
rendered_diff = diff.render_diff(previous_version_file_contents=watch.get_last_fetched_text_before_filters(),

@@ -229,12 +223,12 @@ class perform_site_check(difference_detection_processor):
line_feed_sep="\n",
include_change_type_prefix=False)

watch.save_last_text_fetched_before_filters(text_content_before_ignored_filter)
watch.save_last_text_fetched_before_filters(text_content_before_ignored_filter.encode('utf-8'))

if not rendered_diff and stripped_text_from_html:
# We had some content, but no differences were found
# Store our new file as the MD5 so it will trigger in the future
c = hashlib.md5(text_content_before_ignored_filter.translate(None, b'\r\n\t ')).hexdigest()
c = hashlib.md5(stripped_text_from_html.translate(TRANSLATE_WHITESPACE_TABLE).encode('utf-8')).hexdigest()
return False, {'previous_md5': c}, stripped_text_from_html.encode('utf-8')
else:
stripped_text_from_html = rendered_diff

@@ -255,14 +249,6 @@ class perform_site_check(difference_detection_processor):

update_obj["last_check_status"] = self.fetcher.get_last_status_code()

# If there's text to skip
# @todo we could abstract out the get_text() to handle this cleaner
text_to_ignore = watch.get('ignore_text', []) + self.datastore.data['settings']['application'].get('global_ignore_text', [])
if len(text_to_ignore):
stripped_text_from_html = html_tools.strip_ignore_text(stripped_text_from_html, text_to_ignore)
else:
stripped_text_from_html = stripped_text_from_html.encode('utf8')

# 615 Extract text by regex
extract_text = watch.get('extract_text', [])
if len(extract_text) > 0:

@@ -271,37 +257,53 @@ class perform_site_check(difference_detection_processor):
# incase they specified something in '/.../x'
if re.search(PERL_STYLE_REGEX, s_re, re.IGNORECASE):
regex = html_tools.perl_style_slash_enclosed_regex_to_options(s_re)
result = re.findall(regex.encode('utf-8'), stripped_text_from_html)
result = re.findall(regex, stripped_text_from_html)

for l in result:
if type(l) is tuple:
# @todo - some formatter option default (between groups)
regex_matched_output += list(l) + [b'\n']
regex_matched_output += list(l) + ['\n']
else:
# @todo - some formatter option default (between each ungrouped result)
regex_matched_output += [l] + [b'\n']
regex_matched_output += [l] + ['\n']
else:
# Doesnt look like regex, just hunt for plaintext and return that which matches
# `stripped_text_from_html` will be bytes, so we must encode s_re also to bytes
r = re.compile(re.escape(s_re.encode('utf-8')), re.IGNORECASE)
r = re.compile(re.escape(s_re), re.IGNORECASE)
res = r.findall(stripped_text_from_html)
if res:
for match in res:
regex_matched_output += [match] + [b'\n']
regex_matched_output += [match] + ['\n']

##########################################################
stripped_text_from_html = ''

# Now we will only show what the regex matched
stripped_text_from_html = b''
text_content_before_ignored_filter = b''
if regex_matched_output:
# @todo some formatter for presentation?
stripped_text_from_html = b''.join(regex_matched_output)
text_content_before_ignored_filter = stripped_text_from_html
stripped_text_from_html = ''.join(regex_matched_output)

if watch.get('remove_duplicate_lines'):
stripped_text_from_html = '\n'.join(dict.fromkeys(line for line in stripped_text_from_html.replace("\n\n", "\n").splitlines()))


if watch.get('sort_text_alphabetically'):
# Note: Because a <p>something</p> will add an extra line feed to signify the paragraph gap
# we end up with 'Some text\n\n', sorting will add all those extra \n at the start, so we remove them here.
stripped_text_from_html = stripped_text_from_html.replace("\n\n", "\n")
stripped_text_from_html = '\n'.join(sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower()))

### CALCULATE MD5
# If there's text to ignore
text_to_ignore = watch.get('ignore_text', []) + self.datastore.data['settings']['application'].get('global_ignore_text', [])
text_for_checksuming = stripped_text_from_html
if text_to_ignore:
text_for_checksuming = html_tools.strip_ignore_text(stripped_text_from_html, text_to_ignore)

# Re #133 - if we should strip whitespaces from triggering the change detected comparison
if self.datastore.data['settings']['application'].get('ignore_whitespace', False):
fetched_md5 = hashlib.md5(stripped_text_from_html.translate(None, b'\r\n\t ')).hexdigest()
if text_for_checksuming and self.datastore.data['settings']['application'].get('ignore_whitespace', False):
fetched_md5 = hashlib.md5(text_for_checksuming.translate(TRANSLATE_WHITESPACE_TABLE).encode('utf-8')).hexdigest()
else:
fetched_md5 = hashlib.md5(stripped_text_from_html).hexdigest()
fetched_md5 = hashlib.md5(text_for_checksuming.encode('utf-8')).hexdigest()

############ Blocking rules, after checksum #################
blocked = False

@@ -329,25 +331,33 @@ class perform_site_check(difference_detection_processor):
if result:
blocked = True

# The main thing that all this at the moment comes down to :)
if watch.get('previous_md5') != fetched_md5:
changed_detected = True

# Looks like something changed, but did it match all the rules?
if blocked:
changed_detected = False
else:
# The main thing that all this at the moment comes down to :)
if watch.get('previous_md5') != fetched_md5:
changed_detected = True

# Extract title as title
if is_html:
if self.datastore.data['settings']['application'].get('extract_title_as_title') or watch['extract_title_as_title']:
if not watch['title'] or not len(watch['title']):
update_obj['title'] = html_tools.extract_element(find='title', html_content=self.fetcher.content)
# Always record the new checksum
update_obj["previous_md5"] = fetched_md5

# On the first run of a site, watch['previous_md5'] will be None, set it the current one.
if not watch.get('previous_md5'):
watch['previous_md5'] = fetched_md5

logger.debug(f"Watch UUID {watch.get('uuid')} content check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")

if changed_detected:
if watch.get('check_unique_lines', False):
has_unique_lines = watch.lines_contain_something_unique_compared_to_history(lines=stripped_text_from_html.splitlines())
ignore_whitespace = self.datastore.data['settings']['application'].get('ignore_whitespace')

has_unique_lines = watch.lines_contain_something_unique_compared_to_history(
    lines=stripped_text_from_html.splitlines(),
    ignore_whitespace=ignore_whitespace
)

# One or more lines? unsure?
if not has_unique_lines:
logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} didnt have anything new setting change_detected=False")

@@ -355,11 +365,6 @@ class perform_site_check(difference_detection_processor):
else:
logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} had unique content")

# Always record the new checksum
update_obj["previous_md5"] = fetched_md5

# On the first run of a site, watch['previous_md5'] will be None, set it the current one.
if not watch.get('previous_md5'):
watch['previous_md5'] = fetched_md5

return changed_detected, update_obj, text_content_before_ignored_filter
# stripped_text_from_html - Everything after filters and NO 'ignored' content
return changed_detected, update_obj, stripped_text_from_html
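The hunks above move the checksum step from a bytes-level translate(None, b'\r\n\t ') to a str translation table. A small sketch of the idea, where the table definition shown here is an assumption rather than the project's actual constant:

```python
import hashlib

TRANSLATE_WHITESPACE_TABLE = str.maketrans('', '', '\r\n\t ')   # assumed shape of the constant

text = "Price: 100\n"
fetched_md5 = hashlib.md5(text.translate(TRANSLATE_WHITESPACE_TABLE).encode('utf-8')).hexdigest()
print(fetched_md5)   # identical for "Price:100" etc, so whitespace-only changes never trigger
```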
@@ -35,4 +35,8 @@ pytest tests/test_access_control.py
pytest tests/test_notification.py
pytest tests/test_backend.py
pytest tests/test_rss.py
pytest tests/test_unique_lines.py
pytest tests/test_unique_lines.py

# Check file:// will pickup a file when enabled
echo "Hello world" > /tmp/test-file.txt
ALLOW_FILE_URI=yes pytest tests/test_security.py

@@ -16,25 +16,31 @@ echo "---------------------------------- SOCKS5 -------------------"
docker run --network changedet-network \
    -v `pwd`/tests/proxy_socks5/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json \
    --rm \
    -e "FLASK_SERVER_NAME=cdio" \
    --hostname cdio \
    -e "SOCKSTEST=proxiesjson" \
    test-changedetectionio \
    bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py'
    bash -c 'cd changedetectionio && pytest --live-server-host=0.0.0.0 --live-server-port=5004 -s tests/proxy_socks5/test_socks5_proxy_sources.py'

# SOCKS5 related - by manually entering in UI
docker run --network changedet-network \
    --rm \
    -e "FLASK_SERVER_NAME=cdio" \
    --hostname cdio \
    -e "SOCKSTEST=manual" \
    test-changedetectionio \
    bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy.py'
    bash -c 'cd changedetectionio && pytest --live-server-host=0.0.0.0 --live-server-port=5004 -s tests/proxy_socks5/test_socks5_proxy.py'

# SOCKS5 related - test from proxies.json via playwright - NOTE- PLAYWRIGHT DOESNT SUPPORT AUTHENTICATING PROXY
docker run --network changedet-network \
    -e "SOCKSTEST=manual-playwright" \
    --hostname cdio \
    -e "FLASK_SERVER_NAME=cdio" \
    -v `pwd`/tests/proxy_socks5/proxies.json-example-noauth:/app/changedetectionio/test-datastore/proxies.json \
    -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" \
    --rm \
    test-changedetectionio \
    bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py'
    bash -c 'cd changedetectionio && pytest --live-server-host=0.0.0.0 --live-server-port=5004 -s tests/proxy_socks5/test_socks5_proxy_sources.py'

echo "socks5 server logs"
docker logs socks5proxy
changedetectionio/static/images/schedule.svg (new file, 225 lines, 5.9 KiB)
@@ -0,0 +1,225 @@
[Inkscape-generated SVG markup for the new "schedule" calendar-with-clock icon, viewBox 0 0 661.2 665.4]
@@ -18,9 +18,25 @@ $(document).ready(function () {

    });

    $("#notification-token-toggle").click(function (e) {
    $(".toggle-show").click(function (e) {
        e.preventDefault();
        $('#notification-tokens-info').toggle();
        let target = $(this).data('target');
        $(target).toggle();
    });

    // Time zone config related
    $(".local-time").each(function (e) {
        $(this).text(new Date($(this).data("utc")).toLocaleString());
    })

    const timezoneInput = $('#application-timezone');
    if(timezoneInput.length) {
        const timezone = Intl.DateTimeFormat().resolvedOptions().timeZone;
        if (!timezoneInput.val().trim()) {
            timezoneInput.val(timezone);
            timezoneInput.after('<div class="timezone-message">The timezone was set from your browser, <strong>be sure to press save!</strong></div>');
        }
    }

});

@@ -1,56 +0,0 @@
/**
 * debounce
 * @param {integer} milliseconds This param indicates the number of milliseconds
 *        to wait after the last call before calling the original function.
 * @param {object} What "this" refers to in the returned function.
 * @return {function} This returns a function that when called will wait the
 *         indicated number of milliseconds after the last call before
 *         calling the original function.
 */
Function.prototype.debounce = function (milliseconds, context) {
    var baseFunction = this,
        timer = null,
        wait = milliseconds;

    return function () {
        var self = context || this,
            args = arguments;

        function complete() {
            baseFunction.apply(self, args);
            timer = null;
        }

        if (timer) {
            clearTimeout(timer);
        }

        timer = setTimeout(complete, wait);
    };
};

/**
 * throttle
 * @param {integer} milliseconds This param indicates the number of milliseconds
 *        to wait between calls before calling the original function.
 * @param {object} What "this" refers to in the returned function.
 * @return {function} This returns a function that when called will wait the
 *         indicated number of milliseconds between calls before
 *         calling the original function.
 */
Function.prototype.throttle = function (milliseconds, context) {
    var baseFunction = this,
        lastEventTimestamp = null,
        limit = milliseconds;

    return function () {
        var self = context || this,
            args = arguments,
            now = Date.now();

        if (!lastEventTimestamp || now - lastEventTimestamp >= limit) {
            lastEventTimestamp = now;
            baseFunction.apply(self, args);
        }
    };
};
@@ -28,17 +28,14 @@ $(document).ready(function() {
        url: notification_base_url,
        data : data,
        statusCode: {
            400: function() {
                // More than likely the CSRF token was lost when the server restarted
                alert("There was a problem processing the request, please reload the page.");
            400: function(data) {
                // More than likely the CSRF token was lost when the server restarted
                alert(data.responseText);
            }
        }
    }).done(function(data){
        console.log(data);
        alert(data);
    }).fail(function(data){
        console.log(data);
        alert('There was an error communicating with the server.');
    })
});
});

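Note: the change above makes the 400 handler echo the server's own error text instead of a fixed message. A minimal standalone sketch of that pattern follows; notification_base_url and data are assumed to be defined earlier in that file, as in the hunk above.

$.ajax({
    type: "POST",
    url: notification_base_url,
    data: data,
    statusCode: {
        400: function (jqXHR) {
            // Show whatever validation/CSRF message the server returned
            alert(jqXHR.responseText);
        }
    }
}).done(function (response) {
    alert(response);
}).fail(function () {
    alert('There was an error communicating with the server.');
});
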
196  changedetectionio/static/js/plugins.js  (Normal file)
@@ -0,0 +1,196 @@
(function ($) {
    /**
     * debounce
     * @param {integer} milliseconds This param indicates the number of milliseconds
     *        to wait after the last call before calling the original function.
     * @param {object} What "this" refers to in the returned function.
     * @return {function} This returns a function that when called will wait the
     *         indicated number of milliseconds after the last call before
     *         calling the original function.
     */
    Function.prototype.debounce = function (milliseconds, context) {
        var baseFunction = this,
            timer = null,
            wait = milliseconds;

        return function () {
            var self = context || this,
                args = arguments;

            function complete() {
                baseFunction.apply(self, args);
                timer = null;
            }

            if (timer) {
                clearTimeout(timer);
            }

            timer = setTimeout(complete, wait);
        };
    };

    /**
     * throttle
     * @param {integer} milliseconds This param indicates the number of milliseconds
     *        to wait between calls before calling the original function.
     * @param {object} What "this" refers to in the returned function.
     * @return {function} This returns a function that when called will wait the
     *         indicated number of milliseconds between calls before
     *         calling the original function.
     */
    Function.prototype.throttle = function (milliseconds, context) {
        var baseFunction = this,
            lastEventTimestamp = null,
            limit = milliseconds;

        return function () {
            var self = context || this,
                args = arguments,
                now = Date.now();

            if (!lastEventTimestamp || now - lastEventTimestamp >= limit) {
                lastEventTimestamp = now;
                baseFunction.apply(self, args);
            }
        };
    };

    $.fn.highlightLines = function (configurations) {
        return this.each(function () {
            const $pre = $(this);
            const textContent = $pre.text();
            const lines = textContent.split(/\r?\n/); // Handles both \n and \r\n line endings

            // Build a map of line numbers to styles
            const lineStyles = {};

            configurations.forEach(config => {
                const {color, lines: lineNumbers} = config;
                lineNumbers.forEach(lineNumber => {
                    lineStyles[lineNumber] = color;
                });
            });

            // Function to escape HTML characters
            function escapeHtml(text) {
                return text.replace(/[&<>"'`=\/]/g, function (s) {
                    return "&#" + s.charCodeAt(0) + ";";
                });
            }

            // Process each line
            const processedLines = lines.map((line, index) => {
                const lineNumber = index + 1; // Line numbers start at 1
                const escapedLine = escapeHtml(line);
                const color = lineStyles[lineNumber];

                if (color) {
                    // Wrap the line in a span with inline style
                    return `<span style="background-color: ${color}">${escapedLine}</span>`;
                } else {
                    return escapedLine;
                }
            });

            // Join the lines back together
            const newContent = processedLines.join('\n');

            // Set the new content as HTML
            $pre.html(newContent);
        });
    };

    $.fn.miniTabs = function (tabsConfig, options) {
        const settings = {
            tabClass: 'minitab',
            tabsContainerClass: 'minitabs',
            activeClass: 'active',
            ...(options || {})
        };

        return this.each(function () {
            const $wrapper = $(this);
            const $contents = $wrapper.find('div[id]').hide();
            const $tabsContainer = $('<div>', {class: settings.tabsContainerClass}).prependTo($wrapper);

            // Generate tabs
            Object.entries(tabsConfig).forEach(([tabTitle, contentSelector], index) => {
                const $content = $wrapper.find(contentSelector);
                if (index === 0) $content.show(); // Show first content by default

                $('<a>', {
                    class: `${settings.tabClass}${index === 0 ? ` ${settings.activeClass}` : ''}`,
                    text: tabTitle,
                    'data-target': contentSelector
                }).appendTo($tabsContainer);
            });

            // Tab click event
            $tabsContainer.on('click', `.${settings.tabClass}`, function (e) {
                e.preventDefault();
                const $tab = $(this);
                const target = $tab.data('target');

                // Update active tab
                $tabsContainer.find(`.${settings.tabClass}`).removeClass(settings.activeClass);
                $tab.addClass(settings.activeClass);

                // Show/hide content
                $contents.hide();
                $wrapper.find(target).show();
            });
        });
    };

    // Object to store ongoing requests by namespace
    const requests = {};

    $.abortiveSingularAjax = function (options) {
        const namespace = options.namespace || 'default';

        // Abort the current request in this namespace if it's still ongoing
        if (requests[namespace]) {
            requests[namespace].abort();
        }

        // Start a new AJAX request and store its reference in the correct namespace
        requests[namespace] = $.ajax(options);

        // Return the current request in case it's needed
        return requests[namespace];
    };
})(jQuery);


function toggleOpacity(checkboxSelector, fieldSelector, inverted) {
    const checkbox = document.querySelector(checkboxSelector);
    const fields = document.querySelectorAll(fieldSelector);

    function updateOpacity() {
        const opacityValue = !checkbox.checked ? (inverted ? 0.6 : 1) : (inverted ? 1 : 0.6);
        fields.forEach(field => {
            field.style.opacity = opacityValue;
        });
    }

    // Initial setup
    updateOpacity();
    checkbox.addEventListener('change', updateOpacity);
}

function toggleVisibility(checkboxSelector, fieldSelector, inverted) {
    const checkbox = document.querySelector(checkboxSelector);
    const fields = document.querySelectorAll(fieldSelector);

    function updateOpacity() {
        const opacityValue = !checkbox.checked ? (inverted ? 'none' : 'block') : (inverted ? 'block' : 'none');
        fields.forEach(field => {
            field.style.display = opacityValue;
        });
    }

    // Initial setup
    updateOpacity();
    checkbox.addEventListener('change', updateOpacity);
}
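Note: a minimal usage sketch for the helpers added in plugins.js above; the selectors, URL and millisecond values below are illustrative, not taken from the project.

// Debounce or throttle any handler via the Function.prototype extensions
$('#search-input').on('keyup', function () {
    console.log('filtering...');
}.debounce(500));

// Highlight lines 2 and 5 of a <pre> block in red
$('pre#diff-output').highlightLines([{color: '#ee0000', lines: [2, 5]}]);

// Turn a wrapper's child <div id=...> blocks into clickable mini tabs
$('.minitabs-wrapper').miniTabs({"After": "#after-pane", "Before": "#before-pane"});

// Only the newest request per namespace survives; the previous one is aborted
$.abortiveSingularAjax({type: 'POST', url: '/preview', namespace: 'watchEdit'});

// Dim a group of fields while a "use default" checkbox is ticked
toggleOpacity('#use-default', '.custom-settings', false);
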
@@ -1,53 +1,63 @@
|
||||
function redirect_to_version(version) {
|
||||
var currentUrl = window.location.href;
|
||||
var baseUrl = currentUrl.split('?')[0]; // Base URL without query parameters
|
||||
function redirectToVersion(version) {
|
||||
var currentUrl = window.location.href.split('?')[0]; // Base URL without query parameters
|
||||
var anchor = '';
|
||||
|
||||
// Check if there is an anchor
|
||||
if (baseUrl.indexOf('#') !== -1) {
|
||||
anchor = baseUrl.substring(baseUrl.indexOf('#'));
|
||||
baseUrl = baseUrl.substring(0, baseUrl.indexOf('#'));
|
||||
if (currentUrl.indexOf('#') !== -1) {
|
||||
anchor = currentUrl.substring(currentUrl.indexOf('#'));
|
||||
currentUrl = currentUrl.substring(0, currentUrl.indexOf('#'));
|
||||
}
|
||||
window.location.href = baseUrl + '?version=' + version + anchor;
|
||||
|
||||
window.location.href = currentUrl + '?version=' + version + anchor;
|
||||
}
|
||||
|
||||
document.addEventListener('keydown', function (event) {
|
||||
var selectElement = document.getElementById('preview-version');
|
||||
if (selectElement) {
|
||||
var selectedOption = selectElement.querySelector('option:checked');
|
||||
if (selectedOption) {
|
||||
if (event.key === 'ArrowLeft') {
|
||||
if (selectedOption.previousElementSibling) {
|
||||
redirect_to_version(selectedOption.previousElementSibling.value);
|
||||
}
|
||||
} else if (event.key === 'ArrowRight') {
|
||||
if (selectedOption.nextElementSibling) {
|
||||
redirect_to_version(selectedOption.nextElementSibling.value);
|
||||
}
|
||||
function setupDateWidget() {
|
||||
$(document).on('keydown', function (event) {
|
||||
var $selectElement = $('#preview-version');
|
||||
var $selectedOption = $selectElement.find('option:selected');
|
||||
|
||||
if ($selectedOption.length) {
|
||||
if (event.key === 'ArrowLeft' && $selectedOption.prev().length) {
|
||||
redirectToVersion($selectedOption.prev().val());
|
||||
} else if (event.key === 'ArrowRight' && $selectedOption.next().length) {
|
||||
redirectToVersion($selectedOption.next().val());
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
$('#preview-version').on('change', function () {
|
||||
redirectToVersion($(this).val());
|
||||
});
|
||||
|
||||
document.getElementById('preview-version').addEventListener('change', function () {
|
||||
redirect_to_version(this.value);
|
||||
});
|
||||
var $selectedOption = $('#preview-version option:selected');
|
||||
|
||||
var selectElement = document.getElementById('preview-version');
|
||||
if (selectElement) {
|
||||
var selectedOption = selectElement.querySelector('option:checked');
|
||||
if (selectedOption) {
|
||||
if (selectedOption.previousElementSibling) {
|
||||
document.getElementById('btn-previous').href = "?version=" + selectedOption.previousElementSibling.value;
|
||||
if ($selectedOption.length) {
|
||||
var $prevOption = $selectedOption.prev();
|
||||
var $nextOption = $selectedOption.next();
|
||||
|
||||
if ($prevOption.length) {
|
||||
$('#btn-previous').attr('href', '?version=' + $prevOption.val());
|
||||
} else {
|
||||
document.getElementById('btn-previous').remove()
|
||||
}
|
||||
if (selectedOption.nextElementSibling) {
|
||||
document.getElementById('btn-next').href = "?version=" + selectedOption.nextElementSibling.value;
|
||||
} else {
|
||||
document.getElementById('btn-next').remove()
|
||||
$('#btn-previous').remove();
|
||||
}
|
||||
|
||||
if ($nextOption.length) {
|
||||
$('#btn-next').attr('href', '?version=' + $nextOption.val());
|
||||
} else {
|
||||
$('#btn-next').remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
$(document).ready(function () {
|
||||
if ($('#preview-version').length) {
|
||||
setupDateWidget();
|
||||
}
|
||||
|
||||
$('#diff-col > pre').highlightLines([
|
||||
{
|
||||
'color': '#ee0000',
|
||||
'lines': triggered_line_numbers
|
||||
}
|
||||
]);
|
||||
});
|
||||
|
||||
@@ -1,14 +1,14 @@
$(function () {
    /* add container before each proxy location to show status */

    var option_li = $('.fetch-backend-proxy li').filter(function() {
        return $("input",this)[0].value.length >0;
    });

    //var option_li = $('.fetch-backend-proxy li');
    var isActive = false;
    $(option_li).prepend('<div class="proxy-status"></div>');
    $(option_li).append('<div class="proxy-timing"></div><div class="proxy-check-details"></div>');

    function setup_html_widget() {
        var option_li = $('.fetch-backend-proxy li').filter(function () {
            return $("input", this)[0].value.length > 0;
        });
        $(option_li).prepend('<div class="proxy-status"></div>');
        $(option_li).append('<div class="proxy-timing"></div><div class="proxy-check-details"></div>');
    }

    function set_proxy_check_status(proxy_key, state) {
        // select input by value name
@@ -59,8 +59,14 @@ $(function () {
    }

    $('#check-all-proxies').click(function (e) {

        e.preventDefault()
        $('body').addClass('proxy-check-active');

        if (!$('body').hasClass('proxy-check-active')) {
            setup_html_widget();
            $('body').addClass('proxy-check-active');
        }

        $('.proxy-check-details').html('');
        $('.proxy-status').html('<span class="spinner"></span>').fadeIn();
        $('.proxy-timing').html('');

109  changedetectionio/static/js/scheduler.js  (Normal file)
@@ -0,0 +1,109 @@
|
||||
function getTimeInTimezone(timezone) {
|
||||
const now = new Date();
|
||||
const options = {
|
||||
timeZone: timezone,
|
||||
weekday: 'long',
|
||||
year: 'numeric',
|
||||
hour12: false,
|
||||
month: '2-digit',
|
||||
day: '2-digit',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
second: '2-digit',
|
||||
};
|
||||
|
||||
const formatter = new Intl.DateTimeFormat('en-US', options);
|
||||
return formatter.format(now);
|
||||
}
|
||||
|
||||
$(document).ready(function () {
|
||||
|
||||
let exceedsLimit = false;
|
||||
const warning_text = $("#timespan-warning")
|
||||
const timezone_text_widget = $("input[id*='time_schedule_limit-timezone']")
|
||||
|
||||
toggleVisibility('#time_schedule_limit-enabled, #requests-time_schedule_limit-enabled', '#schedule-day-limits-wrapper', true)
|
||||
|
||||
setInterval(() => {
|
||||
let success = true;
|
||||
try {
|
||||
// Show the current local time according to either placeholder or entered TZ name
|
||||
if (timezone_text_widget.val().length) {
|
||||
$('#local-time-in-tz').text(getTimeInTimezone(timezone_text_widget.val()));
|
||||
} else {
|
||||
// So maybe use what is in the placeholder (which will be the default settings)
|
||||
$('#local-time-in-tz').text(getTimeInTimezone(timezone_text_widget.attr('placeholder')));
|
||||
}
|
||||
} catch (error) {
|
||||
success = false;
|
||||
$('#local-time-in-tz').text("");
|
||||
console.error(timezone_text_widget.val())
|
||||
}
|
||||
|
||||
$(timezone_text_widget).toggleClass('error', !success);
|
||||
|
||||
}, 500);
|
||||
|
||||
$('#schedule-day-limits-wrapper').on('change click blur', 'input, checkbox, select', function() {
|
||||
|
||||
let allOk = true;
|
||||
|
||||
// Controls setting the warning that the time could overlap into the next day
|
||||
$("li.day-schedule").each(function () {
|
||||
const $schedule = $(this);
|
||||
const $checkbox = $schedule.find("input[type='checkbox']");
|
||||
|
||||
if ($checkbox.is(":checked")) {
|
||||
const timeValue = $schedule.find("input[type='time']").val();
|
||||
const durationHours = parseInt($schedule.find("select[name*='-duration-hours']").val(), 10) || 0;
|
||||
const durationMinutes = parseInt($schedule.find("select[name*='-duration-minutes']").val(), 10) || 0;
|
||||
|
||||
if (timeValue) {
|
||||
const [startHours, startMinutes] = timeValue.split(":").map(Number);
|
||||
const totalMinutes = (startHours * 60 + startMinutes) + (durationHours * 60 + durationMinutes);
|
||||
|
||||
exceedsLimit = totalMinutes > 1440
|
||||
if (exceedsLimit) {
|
||||
allOk = false
|
||||
}
|
||||
// Set the row/day-of-week highlight
|
||||
$schedule.toggleClass("warning", exceedsLimit);
|
||||
}
|
||||
} else {
|
||||
$schedule.toggleClass("warning", false);
|
||||
}
|
||||
});
|
||||
|
||||
warning_text.toggle(!allOk)
|
||||
});
|
||||
|
||||
$('table[id*="time_schedule_limit-saturday"], table[id*="time_schedule_limit-sunday"]').addClass("weekend-day")
|
||||
|
||||
// Presets [weekend] [business hours] etc
|
||||
$(document).on('click', '[data-template].set-schedule', function () {
|
||||
// Get the value of the 'data-template' attribute
|
||||
switch ($(this).attr('data-template')) {
|
||||
case 'business-hours':
|
||||
$('.day-schedule table:not(.weekend-day) input[type="time"]').val('09:00')
|
||||
$('.day-schedule table:not(.weekend-day) select[id*="-duration-hours"]').val('8');
|
||||
$('.day-schedule table:not(.weekend-day) select[id*="-duration-minutes"]').val('0');
|
||||
$('.day-schedule input[id*="-enabled"]').prop('checked', true);
|
||||
$('.day-schedule .weekend-day input[id*="-enabled"]').prop('checked', false);
|
||||
break;
|
||||
case 'weekend':
|
||||
$('.day-schedule .weekend-day input[type="time"][id$="start-time"]').val('00:00')
|
||||
$('.day-schedule .weekend-day select[id*="-duration-hours"]').val('24');
|
||||
$('.day-schedule .weekend-day select[id*="-duration-minutes"]').val('0');
|
||||
$('.day-schedule input[id*="-enabled"]').prop('checked', false);
|
||||
$('.day-schedule .weekend-day input[id*="-enabled"]').prop('checked', true);
|
||||
break;
|
||||
case 'reset':
|
||||
|
||||
$('.day-schedule input[type="time"]').val('00:00')
|
||||
$('.day-schedule select[id*="-duration-hours"]').val('24');
|
||||
$('.day-schedule select[id*="-duration-minutes"]').val('0');
|
||||
$('.day-schedule input[id*="-enabled"]').prop('checked', true);
|
||||
break;
|
||||
}
|
||||
});
|
||||
});
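Note: the warning logic in scheduler.js above flags any enabled day whose start time plus duration crosses midnight. A small standalone illustration of that check; the 23:00 start and 2 hour duration are made-up values.

// 23:00 start + 2h duration = 1500 minutes, which exceeds 1440 (one full day)
const [startHours, startMinutes] = "23:00".split(":").map(Number);
const totalMinutes = (startHours * 60 + startMinutes) + (2 * 60 + 0);
console.log(totalMinutes > 1440);  // true, so that row would get the "warning" class
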
|
||||
@@ -26,8 +26,7 @@ function set_active_tab() {
    if (tab.length) {
        tab[0].parentElement.className = "active";
    }
    // hash could move the page down
    window.scrollTo(0, 0);

}

function focus_error_tab() {

@@ -49,4 +49,9 @@ $(document).ready(function () {
        $("#overlay").toggleClass('visible');
        heartpath.style.fill = document.getElementById("overlay").classList.contains("visible") ? '#ff0000' : 'var(--color-background)';
    });

    setInterval(function () {
        $('body').toggleClass('spinner-active', $.active > 0);
    }, 2000);

});

@@ -132,6 +132,7 @@ $(document).ready(() => {
    }).done((data) => {
        $fetchingUpdateNoticeElem.html("Rendering..");
        selectorData = data;

        sortScrapedElementsBySize();
        console.log(`Reported browser width from backend: ${data['browser_width']}`);

@@ -1,18 +1,51 @@
|
||||
function toggleOpacity(checkboxSelector, fieldSelector) {
|
||||
const checkbox = document.querySelector(checkboxSelector);
|
||||
const fields = document.querySelectorAll(fieldSelector);
|
||||
function updateOpacity() {
|
||||
const opacityValue = checkbox.checked ? 0.6 : 1;
|
||||
fields.forEach(field => {
|
||||
field.style.opacity = opacityValue;
|
||||
});
|
||||
|
||||
function request_textpreview_update() {
|
||||
if (!$('body').hasClass('preview-text-enabled')) {
|
||||
console.error("Preview text was requested but body tag was not setup")
|
||||
return
|
||||
}
|
||||
// Initial setup
|
||||
updateOpacity();
|
||||
checkbox.addEventListener('change', updateOpacity);
|
||||
|
||||
const data = {};
|
||||
$('textarea:visible, input:visible').each(function () {
|
||||
const $element = $(this); // Cache the jQuery object for the current element
|
||||
const name = $element.attr('name'); // Get the name attribute of the element
|
||||
data[name] = $element.is(':checkbox') ? ($element.is(':checked') ? $element.val() : false) : $element.val();
|
||||
});
|
||||
|
||||
$('body').toggleClass('spinner-active', 1);
|
||||
|
||||
$.abortiveSingularAjax({
|
||||
type: "POST",
|
||||
url: preview_text_edit_filters_url,
|
||||
data: data,
|
||||
namespace: 'watchEdit'
|
||||
}).done(function (data) {
|
||||
console.debug(data['duration'])
|
||||
$('#filters-and-triggers #text-preview-before-inner').text(data['before_filter']);
|
||||
$('#filters-and-triggers #text-preview-inner')
|
||||
.text(data['after_filter'])
|
||||
.highlightLines([
|
||||
{
|
||||
'color': '#ee0000',
|
||||
'lines': data['trigger_line_numbers']
|
||||
},
|
||||
{
|
||||
'color': '#757575',
|
||||
'lines': data['ignore_line_numbers']
|
||||
}
|
||||
])
|
||||
}).fail(function (error) {
|
||||
if (error.statusText === 'abort') {
|
||||
console.log('Request was aborted due to a new request being fired.');
|
||||
} else {
|
||||
$('#filters-and-triggers #text-preview-inner').text('There was an error communicating with the server.');
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
$(document).ready(function () {
|
||||
|
||||
$('#notification-setting-reset-to-default').click(function (e) {
|
||||
$('#notification_title').val('');
|
||||
$('#notification_body').val('');
|
||||
@@ -25,6 +58,24 @@ $(document).ready(function () {
|
||||
$('#notification-tokens-info').toggle();
|
||||
});
|
||||
|
||||
toggleOpacity('#time_between_check_use_default', '#time_between_check');
|
||||
toggleOpacity('#time_between_check_use_default', '#time-check-widget-wrapper, #time-between-check-schedule', false);
|
||||
|
||||
|
||||
const vh = Math.max(document.documentElement.clientHeight || 0, window.innerHeight || 0);
|
||||
$("#text-preview-inner").css('max-height', (vh - 300) + "px");
|
||||
$("#text-preview-before-inner").css('max-height', (vh - 300) + "px");
|
||||
|
||||
$("#activate-text-preview").click(function (e) {
|
||||
$('body').toggleClass('preview-text-enabled')
|
||||
request_textpreview_update();
|
||||
const method = $('body').hasClass('preview-text-enabled') ? 'on' : 'off';
|
||||
$('#filters-and-triggers textarea')[method]('blur', request_textpreview_update.throttle(1000));
|
||||
$('#filters-and-triggers input')[method]('change', request_textpreview_update.throttle(1000));
|
||||
$("#filters-and-triggers-tab")[method]('click', request_textpreview_update.throttle(1000));
|
||||
});
|
||||
$('.minitabs-wrapper').miniTabs({
|
||||
"Content after filters": "#text-preview-inner",
|
||||
"Content raw/before filters": "#text-preview-before-inner"
|
||||
});
|
||||
});
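Note: the preview toggle above picks the jQuery method by name with bracket access so the same line binds or unbinds the throttled handler. A small isolated illustration of that pattern; the element IDs are illustrative and the throttle helper comes from plugins.js.

const method = $('body').hasClass('preview-text-enabled') ? 'on' : 'off';
// Equivalent to calling $('#my-field').on('blur', ...) or .off('blur', ...) depending on the flag
$('#my-field')[method]('blur', function () {
    console.log('refresh preview');
}.throttle(1000));
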
|
||||
|
||||
|
||||
@@ -153,7 +153,8 @@ html[data-darkmode="true"] {
|
||||
border: 1px solid transparent;
|
||||
vertical-align: top;
|
||||
font: 1em monospace;
|
||||
text-align: left; }
|
||||
text-align: left;
|
||||
overflow: clip; }
|
||||
#diff-ui pre {
|
||||
white-space: pre-wrap; }
|
||||
|
||||
@@ -172,7 +173,9 @@ ins {
|
||||
text-decoration: none; }
|
||||
|
||||
#result {
|
||||
white-space: pre-wrap; }
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word; }
|
||||
|
||||
#settings {
|
||||
background: rgba(0, 0, 0, 0.05);
|
||||
@@ -231,3 +234,12 @@ td#diff-col div {
|
||||
border-radius: 5px;
|
||||
background: var(--color-background);
|
||||
box-shadow: 1px 1px 4px var(--color-shadow-jump); }
|
||||
|
||||
.pure-form button.reset-margin {
|
||||
margin: 0px; }
|
||||
|
||||
.diff-fieldset {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
flex-wrap: wrap; }
|
||||
|
||||
@@ -24,6 +24,7 @@
|
||||
vertical-align: top;
|
||||
font: 1em monospace;
|
||||
text-align: left;
|
||||
overflow: clip; // clip overflowing contents to cell boundaries
|
||||
}
|
||||
|
||||
pre {
|
||||
@@ -50,6 +51,8 @@ ins {
|
||||
|
||||
#result {
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
|
||||
.change {
|
||||
span {}
|
||||
@@ -134,3 +137,15 @@ td#diff-col div {
|
||||
background: var(--color-background);
|
||||
box-shadow: 1px 1px 4px var(--color-shadow-jump);
|
||||
}
|
||||
|
||||
// resets button margin to 0px
|
||||
.pure-form button.reset-margin {
|
||||
margin: 0px;
|
||||
}
|
||||
|
||||
.diff-fieldset {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
@@ -40,15 +40,29 @@
|
||||
}
|
||||
}
|
||||
|
||||
#browser-steps-fieldlist {
|
||||
height: 100%;
|
||||
overflow-y: scroll;
|
||||
}
|
||||
|
||||
#browser-steps .flex-wrapper {
|
||||
display: flex;
|
||||
flex-flow: row;
|
||||
height: 70vh;
|
||||
font-size: 80%;
|
||||
#browser-steps-ui {
|
||||
flex-grow: 1; /* Allow it to grow and fill the available space */
|
||||
flex-shrink: 1; /* Allow it to shrink if needed */
|
||||
flex-basis: 0; /* Start with 0 base width so it stretches as much as possible */
|
||||
background-color: #eee;
|
||||
border-radius: 5px;
|
||||
|
||||
}
|
||||
|
||||
#browser-steps-fieldlist {
|
||||
flex-grow: 0; /* Don't allow it to grow */
|
||||
flex-shrink: 0; /* Don't allow it to shrink */
|
||||
flex-basis: auto; /* Base width is determined by the content */
|
||||
max-width: 400px; /* Set a max width to prevent overflow */
|
||||
padding-left: 1rem;
|
||||
overflow-y: scroll;
|
||||
}
|
||||
}
|
||||
|
||||
/* this is duplicate :( */
|
||||
|
||||
@@ -11,7 +11,22 @@ ul#requests-extra_browsers {
|
||||
/* each proxy entry is a `table` */
|
||||
table {
|
||||
tr {
|
||||
display: inline;
|
||||
display: table-row; // default display for small screens
|
||||
input[type=text] {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// apply inline display for larger screens
|
||||
@media only screen and (min-width: 1280px) {
|
||||
table {
|
||||
tr {
|
||||
display: inline;
|
||||
input[type=text] {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,7 +11,19 @@ ul#requests-extra_proxies {
|
||||
/* each proxy entry is a `table` */
|
||||
table {
|
||||
tr {
|
||||
display: inline;
|
||||
display: table-row; // default display for small screens
|
||||
input[type=text] {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// apply inline display for large screens
|
||||
@media only screen and (min-width: 1024px) {
|
||||
table {
|
||||
tr {
|
||||
display: inline;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -25,15 +37,19 @@ ul#requests-extra_proxies {
|
||||
|
||||
body.proxy-check-active {
|
||||
#request {
|
||||
// Padding set by flex layout
|
||||
/*
|
||||
.proxy-status {
|
||||
width: 2em;
|
||||
}
|
||||
*/
|
||||
|
||||
.proxy-check-details {
|
||||
font-size: 80%;
|
||||
color: #555;
|
||||
display: block;
|
||||
padding-left: 4em;
|
||||
padding-left: 2em;
|
||||
max-width: 500px;
|
||||
}
|
||||
|
||||
.proxy-timing {
|
||||
|
||||
47  changedetectionio/static/styles/scss/parts/_minitabs.scss  (Normal file)
@@ -0,0 +1,47 @@
|
||||
.minitabs-wrapper {
|
||||
width: 100%;
|
||||
|
||||
> div[id] {
|
||||
padding: 20px;
|
||||
border: 1px solid #ccc;
|
||||
border-top: none;
|
||||
}
|
||||
|
||||
.minitabs-content {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
> div {
|
||||
flex: 1 1 auto;
|
||||
min-width: 0;
|
||||
overflow: scroll;
|
||||
}
|
||||
}
|
||||
|
||||
.minitabs {
|
||||
display: flex;
|
||||
border-bottom: 1px solid #ccc;
|
||||
}
|
||||
|
||||
.minitab {
|
||||
flex: 1;
|
||||
text-align: center;
|
||||
padding: 12px 0;
|
||||
text-decoration: none;
|
||||
color: #333;
|
||||
background-color: #f1f1f1;
|
||||
border: 1px solid #ccc;
|
||||
border-bottom: none;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.3s;
|
||||
}
|
||||
|
||||
.minitab:hover {
|
||||
background-color: #ddd;
|
||||
}
|
||||
|
||||
.minitab.active {
|
||||
background-color: #fff;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,55 @@
|
||||
@import "minitabs";
|
||||
|
||||
body.preview-text-enabled {
|
||||
|
||||
@media (min-width: 800px) {
|
||||
#filters-and-triggers > div {
|
||||
display: flex; /* Establishes Flexbox layout */
|
||||
gap: 20px; /* Adds space between the columns */
|
||||
position: relative; /* Ensures the sticky positioning is relative to this parent */
|
||||
}
|
||||
}
|
||||
|
||||
/* layout of the page */
|
||||
#edit-text-filter, #text-preview {
|
||||
flex: 1; /* Each column takes an equal amount of available space */
|
||||
align-self: flex-start; /* Aligns the right column to the start, allowing it to maintain its content height */
|
||||
}
|
||||
|
||||
#edit-text-filter {
|
||||
#pro-tips {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
#text-preview {
|
||||
position: sticky;
|
||||
top: 20px;
|
||||
padding-top: 1rem;
|
||||
padding-bottom: 1rem;
|
||||
display: block !important;
|
||||
}
|
||||
|
||||
#activate-text-preview {
|
||||
background-color: var(--color-grey-500);
|
||||
}
|
||||
|
||||
/* actual preview area */
|
||||
.monospace-preview {
|
||||
background: var(--color-background-input);
|
||||
border: 1px solid var(--color-grey-600);
|
||||
padding: 1rem;
|
||||
color: var(--color-text-input);
|
||||
font-family: "Courier New", Courier, monospace; /* Sets the font to a monospace type */
|
||||
font-size: 70%;
|
||||
word-break: break-word;
|
||||
white-space: pre-wrap; /* Preserves whitespace and line breaks like <pre> */
|
||||
}
|
||||
}
|
||||
|
||||
#activate-text-preview {
|
||||
right: 0;
|
||||
position: absolute;
|
||||
z-index: 3;
|
||||
box-shadow: 1px 1px 4px var(--color-shadow-jump);
|
||||
}
|
||||
@@ -12,6 +12,7 @@
|
||||
@import "parts/_darkmode";
|
||||
@import "parts/_menu";
|
||||
@import "parts/_love";
|
||||
@import "parts/preview_text_filter";
|
||||
|
||||
body {
|
||||
color: var(--color-text);
|
||||
@@ -105,10 +106,34 @@ button.toggle-button {
|
||||
padding: 5px;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
border-bottom: 2px solid var(--color-menu-accent);
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
#pure-menu-horizontal-spinner {
|
||||
height: 3px;
|
||||
background: linear-gradient(-75deg, #ff6000, #ff8f00, #ffdd00, #ed0000);
|
||||
background-size: 400% 400%;
|
||||
width: 100%;
|
||||
animation: gradient 200s ease infinite;
|
||||
}
|
||||
|
||||
body.spinner-active {
|
||||
#pure-menu-horizontal-spinner {
|
||||
animation: gradient 1s ease infinite;
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes gradient {
|
||||
0% {
|
||||
background-position: 0% 50%;
|
||||
}
|
||||
50% {
|
||||
background-position: 100% 50%;
|
||||
}
|
||||
100% {
|
||||
background-position: 0% 50%;
|
||||
}
|
||||
}
|
||||
.pure-menu-heading {
|
||||
color: var(--color-text-menu-heading);
|
||||
}
|
||||
@@ -122,8 +147,14 @@ button.toggle-button {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.tab-pane-inner {
|
||||
// .tab-pane-inner will have the #id that the tab button jumps/anchors to
|
||||
scroll-margin-top: 200px;
|
||||
}
|
||||
|
||||
section.content {
|
||||
padding-top: 5em;
|
||||
padding-top: 100px;
|
||||
padding-bottom: 1em;
|
||||
flex-direction: column;
|
||||
display: flex;
|
||||
@@ -186,12 +217,17 @@ code {
|
||||
}
|
||||
}
|
||||
|
||||
.watch-tag-list {
|
||||
color: var(--color-white);
|
||||
.inline-tag {
|
||||
white-space: nowrap;
|
||||
background: var(--color-text-watch-tag-list);
|
||||
border-radius: 5px;
|
||||
padding: 2px 5px;
|
||||
margin-right: 4px;
|
||||
}
|
||||
|
||||
.watch-tag-list {
|
||||
color: var(--color-white);
|
||||
background: var(--color-text-watch-tag-list);
|
||||
@extend .inline-tag;
|
||||
}
|
||||
|
||||
.box {
|
||||
@@ -315,10 +351,6 @@ a.pure-button-selected {
|
||||
background: var(--color-background-button-cancel);
|
||||
}
|
||||
|
||||
#save_button {
|
||||
margin-right: 1rem;
|
||||
}
|
||||
|
||||
.messages {
|
||||
li {
|
||||
list-style: none;
|
||||
@@ -615,9 +647,9 @@ footer {
|
||||
list-style: none;
|
||||
|
||||
li {
|
||||
>* {
|
||||
display: inline-block;
|
||||
}
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 1em;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -677,6 +709,12 @@ footer {
|
||||
tr {
|
||||
th {
|
||||
display: inline-block;
|
||||
// Hide the "Last" text for smaller screens
|
||||
@media (max-width: 768px) {
|
||||
.hide-on-mobile {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.empty-cell {
|
||||
@@ -692,6 +730,24 @@ footer {
|
||||
}
|
||||
}
|
||||
|
||||
tbody {
|
||||
tr {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
|
||||
// The third child of each row will take up the remaining space
|
||||
// This is useful for the URL column, which should expand to fill the remaining space
|
||||
:nth-child(3) {
|
||||
flex-grow: 1;
|
||||
}
|
||||
// The last three children (from the end) of each row will take up the full width
|
||||
// This is useful for the "Last Checked", "Last Changed", and the action buttons columns, which should each take up the full width
|
||||
:nth-last-child(-n+3) {
|
||||
flex-basis: 100%;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.last-checked {
|
||||
>span {
|
||||
vertical-align: middle;
|
||||
@@ -810,6 +866,11 @@ textarea::placeholder {
|
||||
- We dont use 'size' with <input> because `size` is too unreliable to override, and will often push-out
|
||||
- Rely always on width in CSS
|
||||
*/
|
||||
/** Set max width for input field */
|
||||
.m-d {
|
||||
min-width: 100%;
|
||||
}
|
||||
|
||||
@media only screen and (min-width: 761px) {
|
||||
|
||||
/* m-d is medium-desktop */
|
||||
@@ -876,6 +937,7 @@ $form-edge-padding: 20px;
|
||||
}
|
||||
|
||||
.tab-pane-inner {
|
||||
|
||||
&:not(:target) {
|
||||
display: none;
|
||||
}
|
||||
@@ -925,6 +987,13 @@ body.full-width {
|
||||
background: var(--color-background);
|
||||
}
|
||||
|
||||
/* Make action buttons have consistent size and spacing */
|
||||
#actions .pure-control-group {
|
||||
display: flex;
|
||||
gap: 0.625em;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.pure-form-message-inline {
|
||||
padding-left: 0;
|
||||
color: var(--color-text-input-description);
|
||||
@@ -968,6 +1037,28 @@ ul {
|
||||
}
|
||||
}
|
||||
|
||||
@media only screen and (max-width: 760px) {
|
||||
.time-check-widget {
|
||||
tbody {
|
||||
display: grid;
|
||||
grid-template-columns: auto 1fr auto 1fr;
|
||||
gap: 0.625em 0.3125em;
|
||||
align-items: center;
|
||||
}
|
||||
tr {
|
||||
display: contents;
|
||||
th {
|
||||
text-align: right;
|
||||
padding-right: 5px;
|
||||
}
|
||||
input[type="number"] {
|
||||
width: 100%;
|
||||
max-width: 5em;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@import "parts/_visualselector";
|
||||
|
||||
#webdriver_delay {
|
||||
@@ -1061,9 +1152,8 @@ ul {
|
||||
.tracking-ldjson-price-data {
|
||||
background-color: var(--color-background-button-green);
|
||||
color: #000;
|
||||
padding: 3px;
|
||||
border-radius: 3px;
|
||||
white-space: nowrap;
|
||||
opacity: 0.6;
|
||||
@extend .inline-tag;
|
||||
}
|
||||
|
||||
.ldjson-price-track-offer {
|
||||
@@ -1109,9 +1199,17 @@ ul {
|
||||
background-color: var(--color-background-button-cancel);
|
||||
color: #777;
|
||||
}
|
||||
padding: 3px;
|
||||
border-radius: 3px;
|
||||
white-space: nowrap;
|
||||
&.error {
|
||||
background-color: var(--color-background-button-error);
|
||||
color: #fff;
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
svg {
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
@extend .inline-tag;
|
||||
}
|
||||
|
||||
#chrome-extension-link {
|
||||
|
||||
@@ -46,14 +46,31 @@
|
||||
#browser_steps li > label {
|
||||
display: none; }
|
||||
|
||||
#browser-steps-fieldlist {
|
||||
height: 100%;
|
||||
overflow-y: scroll; }
|
||||
|
||||
#browser-steps .flex-wrapper {
|
||||
display: flex;
|
||||
flex-flow: row;
|
||||
height: 70vh; }
|
||||
height: 70vh;
|
||||
font-size: 80%; }
|
||||
#browser-steps .flex-wrapper #browser-steps-ui {
|
||||
flex-grow: 1;
|
||||
/* Allow it to grow and fill the available space */
|
||||
flex-shrink: 1;
|
||||
/* Allow it to shrink if needed */
|
||||
flex-basis: 0;
|
||||
/* Start with 0 base width so it stretches as much as possible */
|
||||
background-color: #eee;
|
||||
border-radius: 5px; }
|
||||
#browser-steps .flex-wrapper #browser-steps-fieldlist {
|
||||
flex-grow: 0;
|
||||
/* Don't allow it to grow */
|
||||
flex-shrink: 0;
|
||||
/* Don't allow it to shrink */
|
||||
flex-basis: auto;
|
||||
/* Base width is determined by the content */
|
||||
max-width: 400px;
|
||||
/* Set a max width to prevent overflow */
|
||||
padding-left: 1rem;
|
||||
overflow-y: scroll; }
|
||||
|
||||
/* this is duplicate :( */
|
||||
#browsersteps-selector-wrapper {
|
||||
@@ -95,26 +112,34 @@ ul#requests-extra_proxies {
|
||||
ul#requests-extra_proxies li > label {
|
||||
display: none; }
|
||||
ul#requests-extra_proxies table tr {
|
||||
display: inline; }
|
||||
display: table-row; }
|
||||
ul#requests-extra_proxies table tr input[type=text] {
|
||||
width: 100%; }
|
||||
@media only screen and (min-width: 1024px) {
|
||||
ul#requests-extra_proxies table tr {
|
||||
display: inline; } }
|
||||
|
||||
#request {
|
||||
/* Auto proxy scan/checker */ }
|
||||
#request label[for=proxy] {
|
||||
display: inline-block; }
|
||||
|
||||
body.proxy-check-active #request .proxy-status {
|
||||
width: 2em; }
|
||||
|
||||
body.proxy-check-active #request .proxy-check-details {
|
||||
font-size: 80%;
|
||||
color: #555;
|
||||
display: block;
|
||||
padding-left: 4em; }
|
||||
|
||||
body.proxy-check-active #request .proxy-timing {
|
||||
font-size: 80%;
|
||||
padding-left: 1rem;
|
||||
color: var(--color-link); }
|
||||
body.proxy-check-active #request {
|
||||
/*
|
||||
.proxy-status {
|
||||
width: 2em;
|
||||
}
|
||||
*/ }
|
||||
body.proxy-check-active #request .proxy-check-details {
|
||||
font-size: 80%;
|
||||
color: #555;
|
||||
display: block;
|
||||
padding-left: 2em;
|
||||
max-width: 500px; }
|
||||
body.proxy-check-active #request .proxy-timing {
|
||||
font-size: 80%;
|
||||
padding-left: 1rem;
|
||||
color: var(--color-link); }
|
||||
|
||||
#recommended-proxy {
|
||||
display: grid;
|
||||
@@ -141,7 +166,14 @@ ul#requests-extra_browsers {
|
||||
ul#requests-extra_browsers li > label {
|
||||
display: none; }
|
||||
ul#requests-extra_browsers table tr {
|
||||
display: inline; }
|
||||
display: table-row; }
|
||||
ul#requests-extra_browsers table tr input[type=text] {
|
||||
width: 100%; }
|
||||
@media only screen and (min-width: 1280px) {
|
||||
ul#requests-extra_browsers table tr {
|
||||
display: inline; }
|
||||
ul#requests-extra_browsers table tr input[type=text] {
|
||||
width: 100%; } }
|
||||
|
||||
#extra-browsers-setting {
|
||||
border: 1px solid var(--color-grey-800);
|
||||
@@ -411,6 +443,83 @@ html[data-darkmode="true"] #toggle-light-mode .icon-dark {
|
||||
fill: #ff0000 !important;
|
||||
transition: all ease 0.3s !important; }
|
||||
|
||||
.minitabs-wrapper {
|
||||
width: 100%; }
|
||||
.minitabs-wrapper > div[id] {
|
||||
padding: 20px;
|
||||
border: 1px solid #ccc;
|
||||
border-top: none; }
|
||||
.minitabs-wrapper .minitabs-content {
|
||||
width: 100%;
|
||||
display: flex; }
|
||||
.minitabs-wrapper .minitabs-content > div {
|
||||
flex: 1 1 auto;
|
||||
min-width: 0;
|
||||
overflow: scroll; }
|
||||
.minitabs-wrapper .minitabs {
|
||||
display: flex;
|
||||
border-bottom: 1px solid #ccc; }
|
||||
.minitabs-wrapper .minitab {
|
||||
flex: 1;
|
||||
text-align: center;
|
||||
padding: 12px 0;
|
||||
text-decoration: none;
|
||||
color: #333;
|
||||
background-color: #f1f1f1;
|
||||
border: 1px solid #ccc;
|
||||
border-bottom: none;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.3s; }
|
||||
.minitabs-wrapper .minitab:hover {
|
||||
background-color: #ddd; }
|
||||
.minitabs-wrapper .minitab.active {
|
||||
background-color: #fff;
|
||||
font-weight: bold; }
|
||||
|
||||
body.preview-text-enabled {
|
||||
/* layout of the page */
|
||||
/* actual preview area */ }
|
||||
@media (min-width: 800px) {
|
||||
body.preview-text-enabled #filters-and-triggers > div {
|
||||
display: flex;
|
||||
/* Establishes Flexbox layout */
|
||||
gap: 20px;
|
||||
/* Adds space between the columns */
|
||||
position: relative;
|
||||
/* Ensures the sticky positioning is relative to this parent */ } }
|
||||
body.preview-text-enabled #edit-text-filter, body.preview-text-enabled #text-preview {
|
||||
flex: 1;
|
||||
/* Each column takes an equal amount of available space */
|
||||
align-self: flex-start;
|
||||
/* Aligns the right column to the start, allowing it to maintain its content height */ }
|
||||
body.preview-text-enabled #edit-text-filter #pro-tips {
|
||||
display: none; }
|
||||
body.preview-text-enabled #text-preview {
|
||||
position: sticky;
|
||||
top: 20px;
|
||||
padding-top: 1rem;
|
||||
padding-bottom: 1rem;
|
||||
display: block !important; }
|
||||
body.preview-text-enabled #activate-text-preview {
|
||||
background-color: var(--color-grey-500); }
|
||||
body.preview-text-enabled .monospace-preview {
|
||||
background: var(--color-background-input);
|
||||
border: 1px solid var(--color-grey-600);
|
||||
padding: 1rem;
|
||||
color: var(--color-text-input);
|
||||
font-family: "Courier New", Courier, monospace;
|
||||
/* Sets the font to a monospace type */
|
||||
font-size: 70%;
|
||||
word-break: break-word;
|
||||
white-space: pre-wrap;
|
||||
/* Preserves whitespace and line breaks like <pre> */ }
|
||||
|
||||
#activate-text-preview {
|
||||
right: 0;
|
||||
position: absolute;
|
||||
z-index: 3;
|
||||
box-shadow: 1px 1px 4px var(--color-shadow-jump); }
|
||||
|
||||
body {
|
||||
color: var(--color-text);
|
||||
background: var(--color-background-page);
|
||||
@@ -479,9 +588,26 @@ button.toggle-button {
|
||||
padding: 5px;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
border-bottom: 2px solid var(--color-menu-accent);
|
||||
align-items: center; }
|
||||
|
||||
#pure-menu-horizontal-spinner {
|
||||
height: 3px;
|
||||
background: linear-gradient(-75deg, #ff6000, #ff8f00, #ffdd00, #ed0000);
|
||||
background-size: 400% 400%;
|
||||
width: 100%;
|
||||
animation: gradient 200s ease infinite; }
|
||||
|
||||
body.spinner-active #pure-menu-horizontal-spinner {
|
||||
animation: gradient 1s ease infinite; }
|
||||
|
||||
@keyframes gradient {
|
||||
0% {
|
||||
background-position: 0% 50%; }
|
||||
50% {
|
||||
background-position: 100% 50%; }
|
||||
100% {
|
||||
background-position: 0% 50%; } }
|
||||
|
||||
.pure-menu-heading {
|
||||
color: var(--color-text-menu-heading); }
|
||||
|
||||
@@ -491,8 +617,11 @@ button.toggle-button {
|
||||
background-color: var(--color-background-menu-link-hover);
|
||||
color: var(--color-text-menu-link-hover); }
|
||||
|
||||
.tab-pane-inner {
|
||||
scroll-margin-top: 200px; }
|
||||
|
||||
section.content {
|
||||
padding-top: 5em;
|
||||
padding-top: 100px;
|
||||
padding-bottom: 1em;
|
||||
flex-direction: column;
|
||||
display: flex;
|
||||
@@ -531,12 +660,15 @@ code {
|
||||
content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
|
||||
margin: 0 3px 0 5px; }
|
||||
|
||||
.inline-tag, .watch-tag-list, .tracking-ldjson-price-data, .restock-label {
|
||||
white-space: nowrap;
|
||||
border-radius: 5px;
|
||||
padding: 2px 5px;
|
||||
margin-right: 4px; }
|
||||
|
||||
.watch-tag-list {
|
||||
color: var(--color-white);
|
||||
white-space: nowrap;
|
||||
background: var(--color-text-watch-tag-list);
|
||||
border-radius: 5px;
|
||||
padding: 2px 5px; }
|
||||
background: var(--color-text-watch-tag-list); }
|
||||
|
||||
.box {
|
||||
max-width: 80%;
|
||||
@@ -631,9 +763,6 @@ a.pure-button-selected {
|
||||
.button-cancel {
|
||||
background: var(--color-background-button-cancel); }
|
||||
|
||||
#save_button {
|
||||
margin-right: 1rem; }
|
||||
|
||||
.messages li {
|
||||
list-style: none;
|
||||
padding: 1em;
|
||||
@@ -832,8 +961,10 @@ footer {
|
||||
.pure-form .inline-radio ul {
|
||||
margin: 0px;
|
||||
list-style: none; }
|
||||
.pure-form .inline-radio ul li > * {
|
||||
display: inline-block; }
|
||||
.pure-form .inline-radio ul li {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 1em; }
|
||||
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 1024px) {
|
||||
.box {
|
||||
@@ -869,12 +1000,24 @@ footer {
|
||||
.watch-table thead {
|
||||
display: block; }
|
||||
.watch-table thead tr th {
|
||||
display: inline-block; }
|
||||
display: inline-block; } }
|
||||
@media only screen and (max-width: 760px) and (max-width: 768px), (min-device-width: 768px) and (max-device-width: 800px) and (max-width: 768px) {
|
||||
.watch-table thead tr th .hide-on-mobile {
|
||||
display: none; } }
|
||||
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 800px) {
|
||||
.watch-table thead .empty-cell {
|
||||
display: none; }
|
||||
.watch-table tbody td,
|
||||
.watch-table tbody tr {
|
||||
display: block; }
|
||||
.watch-table tbody tr {
|
||||
display: flex;
|
||||
flex-wrap: wrap; }
|
||||
.watch-table tbody tr :nth-child(3) {
|
||||
flex-grow: 1; }
|
||||
.watch-table tbody tr :nth-last-child(-n+3) {
|
||||
flex-basis: 100%; }
|
||||
.watch-table .last-checked > span {
|
||||
vertical-align: middle; }
|
||||
.watch-table .last-checked::before {
|
||||
@@ -966,6 +1109,10 @@ textarea::placeholder {
|
||||
- We dont use 'size' with <input> because `size` is too unreliable to override, and will often push-out
|
||||
- Rely always on width in CSS
|
||||
*/
|
||||
/** Set max width for input field */
|
||||
.m-d {
|
||||
min-width: 100%; }
|
||||
|
||||
@media only screen and (min-width: 761px) {
|
||||
/* m-d is medium-desktop */
|
||||
.m-d {
|
||||
@@ -1026,7 +1173,8 @@ body.full-width .edit-form {
|
||||
.edit-form {
|
||||
min-width: 70%;
|
||||
/* so it cant overflow */
|
||||
max-width: 95%; }
|
||||
max-width: 95%;
|
||||
/* Make action buttons have consistent size and spacing */ }
|
||||
.edit-form .box-wrap {
|
||||
position: relative; }
|
||||
.edit-form .inner {
|
||||
@@ -1035,6 +1183,10 @@ body.full-width .edit-form {
|
||||
.edit-form #actions {
|
||||
display: block;
|
||||
background: var(--color-background); }
|
||||
.edit-form #actions .pure-control-group {
|
||||
display: flex;
|
||||
gap: 0.625em;
|
||||
flex-wrap: wrap; }
|
||||
.edit-form .pure-form-message-inline {
|
||||
padding-left: 0;
|
||||
color: var(--color-text-input-description); }
|
||||
@@ -1063,6 +1215,21 @@ ul {
|
||||
.time-check-widget tr input[type="number"] {
|
||||
width: 5em; }
|
||||
|
||||
@media only screen and (max-width: 760px) {
|
||||
.time-check-widget tbody {
|
||||
display: grid;
|
||||
grid-template-columns: auto 1fr auto 1fr;
|
||||
gap: 0.625em 0.3125em;
|
||||
align-items: center; }
|
||||
.time-check-widget tr {
|
||||
display: contents; }
|
||||
.time-check-widget tr th {
|
||||
text-align: right;
|
||||
padding-right: 5px; }
|
||||
.time-check-widget tr input[type="number"] {
|
||||
width: 100%;
|
||||
max-width: 5em; } }
|
||||
|
||||
#selector-wrapper {
|
||||
height: 100%;
|
||||
text-align: center;
|
||||
@@ -1153,9 +1320,7 @@ ul {
|
||||
.tracking-ldjson-price-data {
|
||||
background-color: var(--color-background-button-green);
|
||||
color: #000;
|
||||
padding: 3px;
|
||||
border-radius: 3px;
|
||||
white-space: nowrap; }
|
||||
opacity: 0.6; }
|
||||
|
||||
.ldjson-price-track-offer {
|
||||
font-weight: bold;
|
||||
@@ -1180,16 +1345,21 @@ ul {
|
||||
#quick-watch-processor-type ul li > * {
|
||||
display: inline-block; }
|
||||
|
||||
.restock-label {
|
||||
padding: 3px;
|
||||
border-radius: 3px;
|
||||
white-space: nowrap; }
|
||||
.restock-label.in-stock {
|
||||
background-color: var(--color-background-button-green);
|
||||
color: #fff; }
|
||||
.restock-label.not-in-stock {
|
||||
background-color: var(--color-background-button-cancel);
|
||||
color: #777; }
|
||||
.restock-label.in-stock {
|
||||
background-color: var(--color-background-button-green);
|
||||
color: #fff; }
|
||||
|
||||
.restock-label.not-in-stock {
|
||||
background-color: var(--color-background-button-cancel);
|
||||
color: #777; }
|
||||
|
||||
.restock-label.error {
|
||||
background-color: var(--color-background-button-error);
|
||||
color: #fff;
|
||||
opacity: 0.7; }
|
||||
|
||||
.restock-label svg {
|
||||
vertical-align: middle; }
|
||||
|
||||
#chrome-extension-link {
|
||||
padding: 9px;
|
||||
|
||||
@@ -4,6 +4,7 @@ from flask import (
|
||||
flash
|
||||
)
|
||||
|
||||
from .html_tools import TRANSLATE_WHITESPACE_TABLE
|
||||
from . model import App, Watch
|
||||
from copy import deepcopy, copy
|
||||
from os import path, unlink
|
||||
@@ -11,13 +12,15 @@ from threading import Lock
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import requests
|
||||
import secrets
|
||||
import threading
|
||||
import time
|
||||
import uuid as uuid_builder
|
||||
from loguru import logger
|
||||
|
||||
from .processors import get_custom_watch_obj_for_processor
|
||||
from .processors.restock_diff import Restock
|
||||
|
||||
# Because the server will run as a daemon and wont know the URL for notification links when firing off a notification
|
||||
BASE_URL_NOT_SET_TEXT = '("Base URL" not set - see settings - notifications)'
|
||||
|
||||
@@ -81,9 +84,13 @@ class ChangeDetectionStore:
|
||||
|
||||
# Convert each existing watch back to the Watch.model object
|
||||
for uuid, watch in self.__data['watching'].items():
|
||||
watch['uuid']=uuid
|
||||
self.__data['watching'][uuid] = Watch.model(datastore_path=self.datastore_path, default=watch)
|
||||
logger.info(f"Watching: {uuid} {self.__data['watching'][uuid]['url']}")
|
||||
self.__data['watching'][uuid] = self.rehydrate_entity(uuid, watch)
|
||||
logger.info(f"Watching: {uuid} {watch['url']}")
|
||||
|
||||
# And for Tags also, should be Restock type because it has extra settings
|
||||
for uuid, tag in self.__data['settings']['application']['tags'].items():
|
||||
self.__data['settings']['application']['tags'][uuid] = self.rehydrate_entity(uuid, tag, processor_override='restock_diff')
|
||||
logger.info(f"Tag: {uuid} {tag['title']}")
|
||||
|
||||
# First time ran, Create the datastore.
|
||||
except (FileNotFoundError):
|
||||
@@ -138,6 +145,22 @@ class ChangeDetectionStore:
|
||||
# Finally start the thread that will manage periodic data saves to JSON
|
||||
save_data_thread = threading.Thread(target=self.save_datastore).start()
|
||||
|
||||
def rehydrate_entity(self, uuid, entity, processor_override=None):
|
||||
"""Set the dict back to the dict Watch object"""
|
||||
entity['uuid'] = uuid
|
||||
|
||||
if processor_override:
|
||||
watch_class = get_custom_watch_obj_for_processor(processor_override)
|
||||
entity['processor']=processor_override
|
||||
else:
|
||||
watch_class = get_custom_watch_obj_for_processor(entity.get('processor'))
|
||||
|
||||
if entity.get('uuid') != 'text_json_diff':
|
||||
logger.trace(f"Loading Watch object '{watch_class.__module__}.{watch_class.__name__}' for UUID {uuid}")
|
||||
|
||||
entity = watch_class(datastore_path=self.datastore_path, default=entity)
|
||||
return entity
|
||||
|
||||
def set_last_viewed(self, uuid, timestamp):
|
||||
logger.debug(f"Setting watch UUID: {uuid} last viewed to {int(timestamp)}")
|
||||
self.data['watching'][uuid].update({'last_viewed': int(timestamp)})
|
||||
@@ -176,6 +199,9 @@ class ChangeDetectionStore:
|
||||
|
||||
@property
|
||||
def has_unviewed(self):
|
||||
if not self.__data.get('watching'):
|
||||
return None
|
||||
|
||||
for uuid, watch in self.__data['watching'].items():
|
||||
if watch.history_n >= 2 and watch.viewed == False:
|
||||
return True
|
||||
@@ -240,35 +266,11 @@ class ChangeDetectionStore:
|
||||
|
||||
# Remove a watch's data but keep the entry (URL etc)
|
||||
def clear_watch_history(self, uuid):
|
||||
import pathlib
|
||||
|
||||
# JSON Data, Screenshots, Textfiles (history index and snapshots), HTML in the future etc
|
||||
for item in pathlib.Path(os.path.join(self.datastore_path, uuid)).rglob("*.*"):
|
||||
unlink(item)
|
||||
|
||||
# Force the attr to recalculate
|
||||
bump = self.__data['watching'][uuid].history
|
||||
|
||||
# Do this last because it will trigger a recheck due to last_checked being zero
|
||||
self.__data['watching'][uuid].update({
|
||||
'browser_steps_last_error_step' : None,
|
||||
'check_count': 0,
|
||||
'fetch_time' : 0.0,
|
||||
'has_ldjson_price_data': None,
|
||||
'in_stock': None,
|
||||
'last_checked': 0,
|
||||
'last_error': False,
|
||||
'last_notification_error': False,
|
||||
'last_viewed': 0,
|
||||
'previous_md5': False,
|
||||
'previous_md5_before_filters': False,
|
||||
'remote_server_reply': None,
|
||||
'track_ldjson_price_data': None,
|
||||
})
|
||||
|
||||
self.__data['watching'][uuid].clear_watch()
|
||||
self.needs_write_urgent = True
|
||||
|
||||
def add_watch(self, url, tag='', extras=None, tag_uuids=None, write_to_disk_now=True):
|
||||
import requests
|
||||
|
||||
if extras is None:
|
||||
extras = {}
|
||||
@@ -342,11 +344,13 @@ class ChangeDetectionStore:
if apply_extras.get('tags'):
apply_extras['tags'] = list(set(apply_extras.get('tags')))

new_watch = Watch.model(datastore_path=self.datastore_path, url=url)
# If the processor also has its own Watch implementation
watch_class = get_custom_watch_obj_for_processor(apply_extras.get('processor'))
new_watch = watch_class(datastore_path=self.datastore_path, url=url)

new_uuid = new_watch.get('uuid')

logger.debug(f"Adding URL {url} - {new_uuid}")
logger.debug(f"Adding URL '{url}' - {new_uuid}")

for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
if k in apply_extras:
@@ -370,7 +374,7 @@ class ChangeDetectionStore:
def visualselector_data_is_ready(self, watch_uuid):
output_path = "{}/{}".format(self.datastore_path, watch_uuid)
screenshot_filename = "{}/last-screenshot.png".format(output_path)
elements_index_filename = "{}/elements.json".format(output_path)
elements_index_filename = "{}/elements.deflate".format(output_path)
if path.isfile(screenshot_filename) and path.isfile(elements_index_filename) :
return True
|
||||
|
||||
@@ -582,7 +586,8 @@ class ChangeDetectionStore:
# Eventually almost everything to do with a watch will apply as a Tag
# So we use the same model as a Watch
with self.lock:
new_tag = Watch.model(datastore_path=self.datastore_path, default={
from .model import Tag
new_tag = Tag.model(datastore_path=self.datastore_path, default={
'title': name.strip(),
'date_created': int(time.time())
})
|
||||
@@ -621,6 +626,39 @@ class ChangeDetectionStore:
return next((v for v in tags if v.get('title', '').lower() == tag_name.lower()),
None)

def any_watches_have_processor_by_name(self, processor_name):
for watch in self.data['watching'].values():
if watch.get('processor') == processor_name:
return True
return False

def get_unique_notification_tokens_available(self):
# Ask each type of watch if it has any extra notification tokens to add to the validation
extra_notification_tokens = {}
watch_processors_checked = set()

for watch_uuid, watch in self.__data['watching'].items():
processor = watch.get('processor')
if processor not in watch_processors_checked:
extra_notification_tokens.update(watch.extra_notification_token_values())
watch_processors_checked.add(processor)

return extra_notification_tokens

def get_unique_notification_token_placeholders_available(self):
# The actual descriptions of the tokens; could be combined with get_unique_notification_tokens_available() instead of doing this twice
extra_notification_tokens = []
watch_processors_checked = set()

for watch_uuid, watch in self.__data['watching'].items():
processor = watch.get('processor')
if processor not in watch_processors_checked:
extra_notification_tokens+=watch.extra_notification_token_placeholder_info()
watch_processors_checked.add(processor)

return extra_notification_tokens
|
||||
|
||||
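A hedged illustration of the per-processor hooks those two methods consume. The hook names are taken from the code above; the class and token names below are invented for the example:

class RestockWatchSketch(dict):
    def extra_notification_token_values(self):
        # Concrete values merged into the notification token set
        return {'restock_price': self.get('restock', {}).get('price', '')}

    def extra_notification_token_placeholder_info(self):
        # (token, description) pairs rendered in the "Show token/placeholders" table
        return [('restock_price', 'Latest detected price')]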
|
||||
def get_updates_available(self):
import inspect
updates_available = []
|
||||
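A minimal sketch of how the numbered schema migrations (update_5, update_17 and friends seen below) are typically discovered so they can be applied in order; the method names are real, the surrounding logic here is assumed:

import inspect
import re

def list_schema_updates(store):
    available = []
    for name, _member in inspect.getmembers(store, predicate=inspect.ismethod):
        match = re.match(r'^update_(\d+)$', name)
        if match:
            available.append(int(match.group(1)))
    # Anything above the datastore's recorded schema version would then be run in order
    return sorted(available)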
@@ -713,17 +751,17 @@ class ChangeDetectionStore:
def update_5(self):
# If the watch's notification body or title looks the same as the global one, unset it so the watch defaults back to using the main settings
# In other words - the watch notification_title and notification_body are not needed if they are the same as the default one
current_system_body = self.data['settings']['application']['notification_body'].translate(str.maketrans('', '', "\r\n "))
current_system_title = self.data['settings']['application']['notification_title'].translate(str.maketrans('', '', "\r\n "))
current_system_body = self.data['settings']['application']['notification_body'].translate(TRANSLATE_WHITESPACE_TABLE)
current_system_title = self.data['settings']['application']['notification_title'].translate(TRANSLATE_WHITESPACE_TABLE)
for uuid, watch in self.data['watching'].items():
try:
watch_body = watch.get('notification_body', '')
if watch_body and watch_body.translate(str.maketrans('', '', "\r\n ")) == current_system_body:
if watch_body and watch_body.translate(TRANSLATE_WHITESPACE_TABLE) == current_system_body:
# Looks the same as the default one, so unset it
watch['notification_body'] = None

watch_title = watch.get('notification_title', '')
if watch_title and watch_title.translate(str.maketrans('', '', "\r\n ")) == current_system_title:
if watch_title and watch_title.translate(TRANSLATE_WHITESPACE_TABLE) == current_system_title:
# Looks the same as the default one, so unset it
watch['notification_title'] = None
except Exception as e:
|
||||
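The hunk above replaces repeated inline str.maketrans() calls with a shared TRANSLATE_WHITESPACE_TABLE constant. Its exact definition is not shown in this diff; presumably it is equivalent to the inline version it replaces, along these lines:

TRANSLATE_WHITESPACE_TABLE = str.maketrans('', '', '\r\n ')   # character set assumed from the old inline call

'  Price changed\r\n'.translate(TRANSLATE_WHITESPACE_TABLE)   # -> 'Pricechanged'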
@@ -849,3 +887,40 @@ class ChangeDetectionStore:
for uuid, watch in self.data['watching'].items():
if isinstance(watch.get('tags'), str):
self.data['watching'][uuid]['tags'] = []

# Migrate old 'in_stock' values to the new Restock
def update_17(self):
for uuid, watch in self.data['watching'].items():
if 'in_stock' in watch:
watch['restock'] = Restock({'in_stock': watch.get('in_stock')})
del watch['in_stock']

# Migrate old restock settings
def update_18(self):
for uuid, watch in self.data['watching'].items():
if not watch.get('restock_settings'):
# So we enable price following by default
self.data['watching'][uuid]['restock_settings'] = {'follow_price_changes': True}

# Migrate and clean off the old value
self.data['watching'][uuid]['restock_settings']['in_stock_processing'] = 'in_stock_only' if watch.get(
'in_stock_only') else 'all_changes'

if self.data['watching'][uuid].get('in_stock_only'):
del (self.data['watching'][uuid]['in_stock_only'])
|
||||
|
||||
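What update_17 and update_18 above amount to for a single watch record, shown as data (values are illustrative, keys taken from the migration code):

watch_before = {
    'in_stock': True,        # old flat flag, removed by update_17
    'in_stock_only': True,   # old setting, removed by update_18
}
watch_after = {
    'restock': {'in_stock': True},                  # wrapped by the Restock model
    'restock_settings': {
        'follow_price_changes': True,               # enabled by default
        'in_stock_processing': 'in_stock_only',     # migrated from in_stock_only
    },
}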
# Compress old elements.json to elements.deflate to save disk space; this compression is pretty fast.
def update_19(self):
import zlib

for uuid, watch in self.data['watching'].items():
json_path = os.path.join(self.datastore_path, uuid, "elements.json")
deflate_path = os.path.join(self.datastore_path, uuid, "elements.deflate")

if os.path.exists(json_path):
with open(json_path, "rb") as f_j:
with open(deflate_path, "wb") as f_d:
logger.debug(f"Compressing {str(json_path)} to {str(deflate_path)}..")
f_d.write(zlib.compress(f_j.read()))
os.unlink(json_path)
|
||||
|
||||
|
||||
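A companion sketch for update_19: the write side above uses zlib.compress(), so reading the visual-selector element data back is simply the inverse (helper name assumed):

import json
import zlib

def load_elements_sketch(deflate_path):
    with open(deflate_path, 'rb') as f:
        return json.loads(zlib.decompress(f.read()))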
@@ -1,7 +1,7 @@
|
||||
|
||||
{% from '_helpers.html' import render_field %}
|
||||
|
||||
{% macro render_common_settings_form(form, emailprefix, settings_application) %}
|
||||
{% macro render_common_settings_form(form, emailprefix, settings_application, extra_notification_token_placeholder_info) %}
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_urls, rows=5, placeholder="Examples:
|
||||
Gitter - gitter://token/room
|
||||
@@ -11,8 +11,11 @@
|
||||
class="notification-urls" )
|
||||
}}
|
||||
<div class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Use <a target=_new href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.</li>
|
||||
<p>
|
||||
<strong>Tip:</strong> Use <a target=_new href="https://github.com/caronc/apprise">AppRise Notification URLs</a> for notification to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.<br>
|
||||
</p>
|
||||
<div data-target="#advanced-help-notifications" class="toggle-show pure-button button-tag button-xsmall">Show advanced help and tips</div>
|
||||
<ul style="display: none" id="advanced-help-notifications">
|
||||
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_discord">discord://</a></code> (or <code>https://discord.com/api/webhooks...</code>)) only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
|
||||
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> bots can't send messages to other bots, so you should specify chat ID of non-bot user.</li>
|
||||
<li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li>
|
||||
@@ -40,7 +43,7 @@
|
||||
|
||||
</div>
|
||||
<div class="pure-controls">
|
||||
<div id="notification-token-toggle" class="pure-button button-tag button-xsmall">Show token/placeholders</div>
|
||||
<div data-target="#notification-tokens-info" class="toggle-show pure-button button-tag button-xsmall">Show token/placeholders</div>
|
||||
</div>
|
||||
<div class="pure-controls" style="display: none;" id="notification-tokens-info">
|
||||
<table class="pure-table" id="token-table">
|
||||
@@ -107,7 +110,15 @@
|
||||
<tr>
|
||||
<td><code>{{ '{{triggered_text}}' }}</code></td>
|
||||
<td>Text that tripped the trigger from filters</td>
|
||||
</tr>
|
||||
|
||||
{% if extra_notification_token_placeholder_info %}
|
||||
{% for token in extra_notification_token_placeholder_info %}
|
||||
<tr>
|
||||
<td><code>{{ '{{' }}{{ token[0] }}{{ '}}' }}</code></td>
|
||||
<td>{{ token[1] }}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="pure-form-message-inline">
|
||||
|
||||
@@ -59,4 +59,100 @@
|
||||
|
||||
{% macro render_button(field) %}
|
||||
{{ field(**kwargs)|safe }}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro render_time_schedule_form(form, available_timezones, timezone_default_config) %}
|
||||
<style>
|
||||
.day-schedule *, .day-schedule select {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.day-schedule label[for*="time_schedule_limit-"][for$="-enabled"] {
|
||||
min-width: 6rem;
|
||||
font-weight: bold;
|
||||
}
|
||||
.day-schedule label {
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
.day-schedule table label {
|
||||
padding-left: 0.5rem;
|
||||
padding-right: 0.5rem;
|
||||
}
|
||||
#timespan-warning, input[id*='time_schedule_limit-timezone'].error {
|
||||
color: #ff0000;
|
||||
}
|
||||
.day-schedule.warning table {
|
||||
background-color: #ffbbc2;
|
||||
}
|
||||
ul#day-wrapper {
|
||||
list-style: none;
|
||||
}
|
||||
#timezone-info > * {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
#scheduler-icon-label {
|
||||
background-position: left center;
|
||||
background-repeat: no-repeat;
|
||||
background-size: contain;
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
padding-left: 50px;
|
||||
background-image: url({{ url_for('static_content', group='images', filename='schedule.svg') }});
|
||||
}
|
||||
#timespan-warning {
|
||||
display: none;
|
||||
}
|
||||
</style>
|
||||
<br>
|
||||
|
||||
{% if timezone_default_config %}
|
||||
<div>
|
||||
<span id="scheduler-icon-label" style="">
|
||||
{{ render_checkbox_field(form.time_schedule_limit.enabled) }}
|
||||
<div class="pure-form-message-inline">
|
||||
Set an hourly/weekday schedule
|
||||
</div>
|
||||
</span>
|
||||
|
||||
</div>
|
||||
<br>
|
||||
<div id="schedule-day-limits-wrapper">
|
||||
<label>Schedule time limits</label><a data-template="business-hours"
|
||||
class="set-schedule pure-button button-secondary button-xsmall">Business
|
||||
hours</a>
|
||||
<a data-template="weekend" class="set-schedule pure-button button-secondary button-xsmall">Weekends</a>
|
||||
<a data-template="reset" class="set-schedule pure-button button-xsmall">Reset</a><br>
|
||||
<br>
|
||||
|
||||
<ul id="day-wrapper">
|
||||
{% for day in ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday'] %}
|
||||
<li class="day-schedule" id="schedule-{{ day }}">
|
||||
{{ render_nolabel_field(form.time_schedule_limit[day]) }}
|
||||
</li>
|
||||
{% endfor %}
|
||||
<li id="timespan-warning">Warning, one or more of your 'days' has a duration that would extend into the next day.<br>
|
||||
This could have unintended consequences.</li>
|
||||
<li id="timezone-info">
|
||||
{{ render_field(form.time_schedule_limit.timezone, placeholder=timezone_default_config) }} <span id="local-time-in-tz"></span>
|
||||
<datalist id="timezones" style="display: none;">
|
||||
{% for timezone in available_timezones %}
|
||||
<option value="{{ timezone }}">{{ timezone }}</option>
|
||||
{% endfor %}
|
||||
</datalist>
|
||||
</li>
|
||||
</ul>
|
||||
<br>
|
||||
<span class="pure-form-message-inline">
|
||||
<a href="https://changedetection.io/tutorials">More help and examples about using the scheduler</a>
|
||||
</span>
|
||||
</div>
|
||||
{% else %}
|
||||
<span class="pure-form-message-inline">
|
||||
Want to use a time schedule? <a href="{{url_for('settings_page')}}#timedate">First confirm/save your Time Zone Settings</a>
|
||||
</span>
|
||||
<br>
|
||||
{% endif %}
|
||||
|
||||
{% endmacro %}
|
||||
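The macro above only renders the per-day enable/start/duration widgets; a hedged sketch of the kind of check such a schedule implies on the backend (names and field layout assumed, not the project's actual scheduler code):

from datetime import datetime, timedelta
from zoneinfo import ZoneInfo

def is_within_schedule(tz_name, start_hhmm, duration_minutes, days_enabled):
    now = datetime.now(ZoneInfo(tz_name))
    if not days_enabled.get(now.strftime('%A').lower(), False):
        return False
    start = now.replace(hour=int(start_hhmm[:2]), minute=int(start_hhmm[3:]),
                        second=0, microsecond=0)
    # Note: a duration that crosses midnight (the warning above) needs extra handling
    return start <= now < start + timedelta(minutes=duration_minutes)

# Example: 'business hours' on a Monday
# is_within_schedule('Europe/Berlin', '09:00', 8 * 60, {'monday': True})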
@@ -33,9 +33,11 @@
|
||||
<script src="{{url_for('static_content', group='js', filename='csrf.js')}}" defer></script>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<body class="">
|
||||
<div class="header">
|
||||
<div class="home-menu pure-menu pure-menu-horizontal pure-menu-fixed" id="nav-menu">
|
||||
<div class="pure-menu-fixed" style="width: 100%;">
|
||||
<div class="home-menu pure-menu pure-menu-horizontal" id="nav-menu">
|
||||
|
||||
{% if has_password and not current_user.is_authenticated %}
|
||||
<a class="pure-menu-heading" href="https://changedetection.io" rel="noopener">
|
||||
<strong>Change</strong>Detection.io</a>
|
||||
@@ -68,7 +70,7 @@
|
||||
<a href="{{ url_for('import_page')}}" class="pure-menu-link">IMPORT</a>
|
||||
</li>
|
||||
<li class="pure-menu-item">
|
||||
<a href="{{ url_for('get_backup')}}" class="pure-menu-link">BACKUP</a>
|
||||
<a href="{{ url_for('backups.index')}}" class="pure-menu-link">BACKUPS</a>
|
||||
</li>
|
||||
{% else %}
|
||||
<li class="pure-menu-item">
|
||||
@@ -129,7 +131,12 @@
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div id="pure-menu-horizontal-spinner"></div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
{% if hosted_sticky %}
|
||||
<div class="sticky-tab" id="hosted-sticky">
|
||||
<a href="https://changedetection.io/?ref={{guid}}">Let us host your instance!</a>
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
|
||||
<div id="settings">
|
||||
<form class="pure-form " action="" method="GET" id="diff-form">
|
||||
<fieldset>
|
||||
<fieldset class="diff-fieldset">
|
||||
{% if versions|length >= 1 %}
|
||||
<strong>Compare</strong>
|
||||
<del class="change"><span>from</span></del>
|
||||
@@ -33,7 +33,7 @@
|
||||
</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<button type="submit" class="pure-button pure-button-primary">Go</button>
|
||||
<button type="submit" class="pure-button pure-button-primary reset-margin">Go</button>
|
||||
{% endif %}
|
||||
</fieldset>
|
||||
<fieldset>
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
{% extends 'base.html' %}
|
||||
{% block content %}
|
||||
{% from '_helpers.html' import render_field, render_checkbox_field, render_button %}
|
||||
{% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form %}
|
||||
{% from '_common_fields.html' import render_common_settings_form %}
|
||||
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='global-settings.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='scheduler.js')}}" defer></script>
|
||||
<script>
|
||||
const browser_steps_available_screenshots=JSON.parse('{{ watch.get_browsersteps_available_screenshots|tojson }}');
|
||||
const browser_steps_config=JSON.parse('{{ browser_steps_config|tojson }}');
|
||||
@@ -16,16 +18,15 @@
|
||||
const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
|
||||
{% endif %}
|
||||
const notification_base_url="{{url_for('ajax_callback_send_notification_test', watch_uuid=uuid)}}";
|
||||
const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};
|
||||
const playwright_enabled={% if playwright_enabled %}true{% else %}false{% endif %};
|
||||
const recheck_proxy_start_url="{{url_for('check_proxies.start_check', uuid=uuid)}}";
|
||||
const proxy_recheck_status_url="{{url_for('check_proxies.get_recheck_status', uuid=uuid)}}";
|
||||
const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
|
||||
const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}";
|
||||
const default_system_fetch_backend="{{ settings_application['fetch_backend'] }}";
|
||||
</script>
|
||||
|
||||
<script src="{{url_for('static_content', group='js', filename='plugins.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='limit.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='visual-selector.js')}}" defer></script>
|
||||
{% if playwright_enabled %}
|
||||
@@ -41,17 +42,15 @@
|
||||
<ul>
|
||||
<li class="tab" id=""><a href="#general">General</a></li>
|
||||
<li class="tab"><a href="#request">Request</a></li>
|
||||
{% if extra_tab_content %}
|
||||
<li class="tab"><a href="#extras_tab">{{ extra_tab_content }}</a></li>
|
||||
{% endif %}
|
||||
{% if playwright_enabled %}
|
||||
<li class="tab"><a id="browsersteps-tab" href="#browser-steps">Browser Steps</a></li>
|
||||
{% endif %}
|
||||
|
||||
{% if watch['processor'] == 'text_json_diff' %}
|
||||
<li class="tab"><a id="visualselector-tab" href="#visualselector">Visual Filter Selector</a></li>
|
||||
<li class="tab"><a href="#filters-and-triggers">Filters & Triggers</a></li>
|
||||
{% endif %}
|
||||
|
||||
{% if watch['processor'] == 'restock_diff' %}
|
||||
<li class="tab"><a href="#restock">Restock Detection</a></li>
|
||||
<li class="tab" id="filters-and-triggers-tab"><a href="#filters-and-triggers">Filters & Triggers</a></li>
|
||||
{% endif %}
|
||||
<li class="tab"><a href="#notifications">Notifications</a></li>
|
||||
<li class="tab"><a href="#stats">Stats</a></li>
|
||||
@@ -60,25 +59,18 @@
|
||||
|
||||
<div class="box-wrap inner">
|
||||
<form class="pure-form pure-form-stacked"
|
||||
action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next'), unpause_on_save = request.args.get('unpause_on_save')) }}" method="POST">
|
||||
action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next'), unpause_on_save = request.args.get('unpause_on_save'), tag = request.args.get('tag')) }}" method="POST">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
|
||||
<div class="tab-pane-inner" id="general">
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.url, placeholder="https://...", required=true, class="m-d") }}
|
||||
<span class="pure-form-message-inline">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></span><br>
|
||||
<span class="pure-form-message-inline">You can use variables in the URL, perfect for inserting the current date and other logic, <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a></span><br>
|
||||
<span class="pure-form-message-inline">
|
||||
{% if watch['processor'] == 'text_json_diff' %}
|
||||
Current mode: <strong>Webpage Text/HTML, JSON and PDF changes.</strong><br>
|
||||
<a href="{{url_for('edit_page', uuid=uuid)}}?switch_processor=restock_diff" class="pure-button button-xsmall">Switch to re-stock detection mode.</a>
|
||||
{% else %}
|
||||
Current mode: <strong>Re-stock detection.</strong><br>
|
||||
<a href="{{url_for('edit_page', uuid=uuid)}}?switch_processor=text_json_diff" class="pure-button button-xsmall">Switch to Webpage Text/HTML, JSON and PDF changes mode.</a>
|
||||
{% endif %}
|
||||
</span>
|
||||
|
||||
<div class="pure-form-message">Some sites use JavaScript to create the content; for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></div>
|
||||
<div class="pure-form-message">Variables are supported in the URL (<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a>).</div>
|
||||
</div>
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_field(form.processor) }}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.title, class="m-d") }}
|
||||
@@ -88,9 +80,24 @@
|
||||
<span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span>
|
||||
</div>
|
||||
<div class="pure-control-group time-between-check border-fieldset">
|
||||
{{ render_field(form.time_between_check, class="time-check-widget") }}
|
||||
|
||||
{{ render_checkbox_field(form.time_between_check_use_default, class="use-default-timecheck") }}
|
||||
</div>
|
||||
<br>
|
||||
<div id="time-check-widget-wrapper">
|
||||
{{ render_field(form.time_between_check, class="time-check-widget") }}
|
||||
|
||||
<span class="pure-form-message-inline">
|
||||
The interval/amount of time between each check.
|
||||
</span>
|
||||
</div>
|
||||
<div id="time-between-check-schedule">
|
||||
<!-- Start Time and End Time -->
|
||||
<div id="limit-between-time">
|
||||
{{ render_time_schedule_form(form, available_timezones, timezone_default_config) }}
|
||||
</div>
|
||||
</div>
|
||||
<br>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_checkbox_field(form.extract_title_as_title) }}
|
||||
</div>
|
||||
@@ -158,21 +165,24 @@
|
||||
{{ render_field(form.method) }}
|
||||
</div>
|
||||
<div id="request-body">
|
||||
{{ render_field(form.body, rows=5, placeholder="Example
|
||||
{{ render_field(form.body, rows=7, placeholder="Example
|
||||
{
|
||||
\"name\":\"John\",
|
||||
\"age\":30,
|
||||
\"car\":null
|
||||
\"car\":null,
|
||||
\"year\":{% now 'Europe/Berlin', '%Y' %}
|
||||
}") }}
|
||||
</div>
|
||||
<div class="pure-form-message">Variables are supported in the request body (<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a>).</div>
|
||||
</div>
|
||||
</fieldset>
|
||||
<!-- hmm -->
|
||||
<div class="pure-control-group advanced-options" style="display: none;">
|
||||
{{ render_field(form.headers, rows=5, placeholder="Example
|
||||
{{ render_field(form.headers, rows=7, placeholder="Example
|
||||
Cookie: foobar
|
||||
User-Agent: wonderbra 1.0") }}
|
||||
|
||||
User-Agent: wonderbra 1.0
|
||||
Math: {{ 1 + 1 }}") }}
|
||||
<div class="pure-form-message">Variables are supported in the request header values (<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a>).</div>
|
||||
<div class="pure-form-message-inline">
|
||||
{% if has_extra_headers_file %}
|
||||
<strong>Alert! Extra headers file found and will be added to this watch!</strong>
|
||||
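The placeholders shown in these request body/header examples ({% now ... %}, {{ 1 + 1 }}) are Jinja2 syntax. A minimal sketch of how such a template could be evaluated server-side before the request is sent; plain Jinja2 covers the expression case, while the {% now %} tag needs a date/time extension (the extension is not shown in this diff):

from jinja2 import Environment, BaseLoader

env = Environment(loader=BaseLoader())
body_template = '{"name": "John", "age": {{ 1 + 29 }}, "car": null}'
rendered_body = env.from_string(body_template).render()
# rendered_body -> '{"name": "John", "age": 30, "car": null}'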
@@ -208,7 +218,7 @@ User-Agent: wonderbra 1.0") }}
|
||||
<div id="loading-status-text" style="display: none;">Please wait, first browser step can take a little time to load..<div class="spinner"></div></div>
|
||||
<div class="flex-wrapper" >
|
||||
|
||||
<div id="browser-steps-ui" class="noselect" style="width: 100%; background-color: #eee; border-radius: 5px;">
|
||||
<div id="browser-steps-ui" class="noselect">
|
||||
|
||||
<div class="noselect" id="browsersteps-selector-wrapper" style="width: 100%">
|
||||
<span class="loader" >
|
||||
@@ -223,7 +233,7 @@ User-Agent: wonderbra 1.0") }}
|
||||
<canvas class="noselect" id="browsersteps-selector-canvas" style="max-width: 100%; width: 100%;"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
<div id="browser-steps-fieldlist" style="padding-left: 1em; width: 350px; font-size: 80%;" >
|
||||
<div id="browser-steps-fieldlist" >
|
||||
<span id="browser-seconds-remaining">Loading</span> <span style="font-size: 80%;"> (<a target=_new href="https://github.com/dgtlmoon/changedetection.io/pull/478/files#diff-1a79d924d1840c485238e66772391268a89c95b781d69091384cf1ea1ac146c9R4">?</a>) </span>
|
||||
{{ render_field(form.browser_steps) }}
|
||||
</div>
|
||||
@@ -255,14 +265,17 @@ User-Agent: wonderbra 1.0") }}
|
||||
{% endif %}
|
||||
<a href="#notifications" id="notification-setting-reset-to-default" class="pure-button button-xsmall" style="right: 20px; top: 20px; position: absolute; background-color: #5f42dd; border-radius: 4px; font-size: 70%; color: #fff">Use system defaults</a>
|
||||
|
||||
{{ render_common_settings_form(form, emailprefix, settings_application) }}
|
||||
{{ render_common_settings_form(form, emailprefix, settings_application, extra_notification_token_placeholder_info) }}
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
{% if watch['processor'] == 'text_json_diff' %}
|
||||
<div class="tab-pane-inner" id="filters-and-triggers">
|
||||
<div class="pure-control-group">
|
||||
<span id="activate-text-preview" class="pure-button pure-button-primary button-xsmall">Activate preview</span>
|
||||
<div>
|
||||
<div id="edit-text-filter">
|
||||
<div class="pure-control-group" id="pro-tips">
|
||||
<strong>Pro-tips:</strong><br>
|
||||
<ul>
|
||||
<li>
|
||||
@@ -284,9 +297,9 @@ xpath://body/div/span[contains(@class, 'example-class')]",
|
||||
{% if '/text()' in field %}
|
||||
<span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the <element> contains <![CDATA[]]></strong></span><br>
|
||||
{% endif %}
|
||||
<span class="pure-form-message-inline">One rule per line, <i>any</i> rules that matches will be used.<br>
|
||||
|
||||
<ul>
|
||||
<span class="pure-form-message-inline">One CSS, xPath, JSON Path/JQ selector per line, <i>any</i> rule that matches will be used.<br>
|
||||
<p><div data-target="#advanced-help-selectors" class="toggle-show pure-button button-tag button-xsmall">Show advanced help and tips</div><br></p>
|
||||
<ul id="advanced-help-selectors" style="display: none;">
|
||||
<li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
|
||||
<li>JSON - Limit text to this JSON rule, using either <a href="https://pypi.org/project/jsonpath-ng/" target="new">JSONPath</a> or <a href="https://stedolan.github.io/jq/" target="new">jq</a> (if installed).
|
||||
<ul>
|
||||
@@ -306,21 +319,25 @@ xpath://body/div/span[contains(@class, 'example-class')]",
|
||||
<li>To use XPath1.0: Prefix with <code>xpath1:</code></li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
Please be sure that you thoroughly understand how to write CSS, JSONPath, XPath{% if jq_support %}, or jq selector{%endif%} rules before filing an issue on GitHub! <a
|
||||
<li>
|
||||
Please be sure that you thoroughly understand how to write CSS, JSONPath, XPath{% if jq_support %}, or jq selector{%endif%} rules before filing an issue on GitHub! <a
|
||||
href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br>
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
</span>
|
||||
</div>
|
||||
<fieldset class="pure-control-group">
|
||||
{{ render_field(form.subtractive_selectors, rows=5, placeholder=has_tag_filters_extra+"header
|
||||
footer
|
||||
nav
|
||||
.stockticker") }}
|
||||
.stockticker
|
||||
//*[contains(text(), 'Advertisement')]") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li> Remove HTML element(s) by CSS selector before text conversion. </li>
|
||||
<li> Don't paste HTML here, use only CSS selectors </li>
|
||||
<li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li>
|
||||
<li> Remove HTML element(s) by CSS and XPath selectors before text conversion. </li>
|
||||
<li> Don't paste HTML here, use only CSS and XPath selectors </li>
|
||||
<li> Add multiple elements, CSS or XPath selectors per line to ignore multiple parts of the HTML. </li>
|
||||
</ul>
|
||||
</span>
|
||||
</fieldset>
|
||||
@@ -331,18 +348,25 @@ nav
|
||||
{{ render_checkbox_field(form.filter_text_added) }}
|
||||
{{ render_checkbox_field(form.filter_text_replaced) }}
|
||||
{{ render_checkbox_field(form.filter_text_removed) }}
|
||||
<span class="pure-form-message-inline">Note: Depending on the length and similarity of the text on each line, the algorithm may consider an <strong>addition</strong> instead of <strong>replacement</strong> for example.</span>
|
||||
<span class="pure-form-message-inline">So it's always better to select <strong>Added</strong>+<strong>Replaced</strong> when you're interested in new content.</span><br>
|
||||
<span class="pure-form-message-inline">When content is merely moved in a list, it will also trigger an <strong>addition</strong>, consider enabling <code><strong>Only trigger when unique lines appear</strong></code></span>
|
||||
<span class="pure-form-message-inline">Note: Depending on the length and similarity of the text on each line, the algorithm may consider an <strong>addition</strong> instead of <strong>replacement</strong> for example.</span><br>
|
||||
<span class="pure-form-message-inline"> So it's always better to select <strong>Added</strong>+<strong>Replaced</strong> when you're interested in new content.</span><br>
|
||||
<span class="pure-form-message-inline"> When content is merely moved in a list, it will also trigger an <strong>addition</strong>, consider enabling <code><strong>Only trigger when unique lines appear</strong></code></span>
|
||||
</fieldset>
|
||||
<fieldset class="pure-control-group">
|
||||
{{ render_checkbox_field(form.check_unique_lines) }}
|
||||
<span class="pure-form-message-inline">Good for websites that just move the content around, and you want to know when NEW content is added, compares new lines against all history for this watch.</span>
|
||||
</fieldset>
|
||||
<fieldset class="pure-control-group">
|
||||
{{ render_checkbox_field(form.remove_duplicate_lines) }}
|
||||
<span class="pure-form-message-inline">Remove duplicate lines of text</span>
|
||||
</fieldset>
|
||||
|
||||
<fieldset class="pure-control-group">
|
||||
{{ render_checkbox_field(form.sort_text_alphabetically) }}
|
||||
<span class="pure-form-message-inline">Helps reduce detected changes caused by sites shuffling lines around; combine with <i>check unique lines</i> below.</span>
|
||||
</fieldset>
|
||||
<fieldset class="pure-control-group">
|
||||
{{ render_checkbox_field(form.check_unique_lines) }}
|
||||
<span class="pure-form-message-inline">Good for websites that just move the content around and you want to know when NEW content is added; compares new lines against all history for this watch.</span>
|
||||
{{ render_checkbox_field(form.trim_text_whitespace) }}
|
||||
<span class="pure-form-message-inline">Remove any whitespace before and after each line of text</span>
|
||||
</fieldset>
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
@@ -365,10 +389,10 @@ nav
|
||||
") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Matching text will be <strong>ignored</strong> in the text snapshot (you can still see it but it won't trigger a change)</li>
|
||||
<li>Each line processed separately, any line matching will be ignored (removed before creating the checksum)</li>
|
||||
<li>Regular Expression support, wrap the entire line in forward slash <code>/regex/</code></li>
|
||||
<li>Changing this will affect the comparison checksum which may trigger an alert</li>
|
||||
<li>Use the preview/show current tab to see ignores</li>
|
||||
</ul>
|
||||
</span>
|
||||
|
||||
@@ -392,7 +416,9 @@ Unavailable") }}
|
||||
</fieldset>
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.extract_text, rows=5, placeholder="\d+ online") }}
|
||||
{{ render_field(form.extract_text, rows=5, placeholder="/.+?\d+ comments.+?/
|
||||
or
|
||||
keyword") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Extracts text in the final output (line by line) after other filters using regular expressions or string match;
|
||||
@@ -412,19 +438,33 @@ Unavailable") }}
|
||||
</fieldset>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if watch['processor'] == 'restock_diff' %}
|
||||
<div class="tab-pane-inner" id="restock">
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_checkbox_field(form.in_stock_only) }}
|
||||
<span class="pure-form-message-inline">Only trigger notifications when page changes from <strong>out of stock</strong> to <strong>back in stock</strong></span>
|
||||
</div>
|
||||
</fieldset>
|
||||
<div id="text-preview" style="display: none;" >
|
||||
<script>
|
||||
const preview_text_edit_filters_url="{{url_for('watch_get_preview_rendered', uuid=uuid)}}";
|
||||
</script>
|
||||
<br>
|
||||
{#<div id="text-preview-controls"><span id="text-preview-refresh" class="pure-button button-xsmall">Refresh</span></div>#}
|
||||
<div class="minitabs-wrapper">
|
||||
<div class="minitabs-content">
|
||||
<div id="text-preview-inner" class="monospace-preview">
|
||||
<p>Loading...</p>
|
||||
</div>
|
||||
<div id="text-preview-before-inner" style="display: none;" class="monospace-preview">
|
||||
<p>Loading...</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endif %}
|
||||
{# rendered sub Template #}
|
||||
{% if extra_form_content %}
|
||||
<div class="tab-pane-inner" id="extras_tab">
|
||||
{{ extra_form_content|safe }}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if watch['processor'] == 'text_json_diff' %}
|
||||
<div class="tab-pane-inner visual-selector-ui" id="visualselector">
|
||||
<img class="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}" alt="New beta functionality">
|
||||
@@ -494,6 +534,12 @@ Unavailable") }}
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
{% if watch.history_n %}
|
||||
<p>
|
||||
<a href="{{url_for('watch_get_latest_html', uuid=uuid)}}" class="pure-button button-small">Download latest HTML snapshot</a>
|
||||
</p>
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
</div>
|
||||
<div id="actions">
|
||||
|
||||
@@ -3,11 +3,13 @@
|
||||
{% block content %}
|
||||
<script>
|
||||
const screenshot_url = "{{url_for('static_content', group='screenshot', filename=uuid)}}";
|
||||
const triggered_line_numbers = {{ triggered_line_numbers|tojson }};
|
||||
{% if last_error_screenshot %}
|
||||
const error_screenshot_url = "{{url_for('static_content', group='screenshot', filename=uuid, error_screenshot=1) }}";
|
||||
{% endif %}
|
||||
const highlight_submit_ignore_url = "{{url_for('highlight_submit_ignore_url', uuid=uuid)}}";
|
||||
</script>
|
||||
<script src="{{url_for('static_content', group='js', filename='plugins.js')}}"></script>
|
||||
<script src="{{ url_for('static_content', group='js', filename='diff-overview.js') }}" defer></script>
|
||||
<script src="{{ url_for('static_content', group='js', filename='preview.js') }}" defer></script>
|
||||
<script src="{{ url_for('static_content', group='js', filename='tabs.js') }}" defer></script>
|
||||
@@ -67,16 +69,15 @@
|
||||
|
||||
<div class="tab-pane-inner" id="text">
|
||||
<div class="snapshot-age">{{ current_version|format_timestamp_timeago }}</div>
|
||||
<span class="ignored">Grey lines are ignored</span> <span class="triggered">Blue lines are triggers</span>
|
||||
<span class="tip"><strong>Pro-tip</strong>: Highlight text to add to ignore filters</span>
|
||||
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td id="diff-col" class="highlightable-filter">
|
||||
{% for row in content %}
|
||||
<div class="{{ row.classes }}">{{ row.line }}</div>
|
||||
{% endfor %}
|
||||
<pre style="border-left: 2px solid #ddd;">
|
||||
{{ content }}
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{% extends 'base.html' %}
|
||||
|
||||
{% block content %}
|
||||
{% from '_helpers.html' import render_field, render_checkbox_field, render_button %}
|
||||
{% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form %}
|
||||
{% from '_common_fields.html' import render_common_settings_form %}
|
||||
<script>
|
||||
const notification_base_url="{{url_for('ajax_callback_send_notification_test', mode="global-settings")}}";
|
||||
@@ -10,9 +10,11 @@
|
||||
{% endif %}
|
||||
</script>
|
||||
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='plugins.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='global-settings.js')}}" defer></script>
|
||||
<script src="{{url_for('static_content', group='js', filename='scheduler.js')}}" defer></script>
|
||||
<div class="edit-form">
|
||||
<div class="tabs collapsable">
|
||||
<ul>
|
||||
@@ -21,6 +23,7 @@
|
||||
<li class="tab"><a href="#fetching">Fetching</a></li>
|
||||
<li class="tab"><a href="#filters">Global Filters</a></li>
|
||||
<li class="tab"><a href="#api">API</a></li>
|
||||
<li class="tab"><a href="#timedate">Time & Date</a></li>
|
||||
<li class="tab"><a href="#proxies">CAPTCHA & Proxies</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
@@ -32,6 +35,12 @@
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.requests.form.time_between_check, class="time-check-widget") }}
|
||||
<span class="pure-form-message-inline">Default recheck time for all watches, current system minimum is <i>{{min_system_recheck_seconds}}</i> seconds (<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Misc-system-settings#enviroment-variables">more info</a>).</span>
|
||||
<div id="time-between-check-schedule">
|
||||
<!-- Start Time and End Time -->
|
||||
<div id="limit-between-time">
|
||||
{{ render_time_schedule_form(form.requests, available_timezones, timezone_default_config) }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.requests.form.jitter_seconds, class="jitter_seconds") }}
|
||||
@@ -76,7 +85,7 @@
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_checkbox_field(form.application.form.empty_pages_are_a_change) }}
|
||||
<span class="pure-form-message-inline">When a page contains HTML, but no renderable text appears (empty page), is this considered a change?</span>
|
||||
<span class="pure-form-message-inline">When a request returns no content, or the HTML does not contain any text, is this considered a change?</span>
|
||||
</div>
|
||||
{% if form.requests.proxy %}
|
||||
<div class="pure-control-group inline-radio">
|
||||
@@ -92,7 +101,7 @@
|
||||
<div class="tab-pane-inner" id="notifications">
|
||||
<fieldset>
|
||||
<div class="field-group">
|
||||
{{ render_common_settings_form(form.application.form, emailprefix, settings_application) }}
|
||||
{{ render_common_settings_form(form.application.form, emailprefix, settings_application, extra_notification_token_placeholder_info) }}
|
||||
</div>
|
||||
</fieldset>
|
||||
<div class="pure-control-group" id="notification-base-url">
|
||||
@@ -155,11 +164,13 @@
|
||||
{{ render_field(form.application.form.global_subtractive_selectors, rows=5, placeholder="header
|
||||
footer
|
||||
nav
|
||||
.stockticker") }}
|
||||
.stockticker
|
||||
//*[contains(text(), 'Advertisement')]") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li> Remove HTML element(s) by CSS selector before text conversion. </li>
|
||||
<li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li>
|
||||
<li> Remove HTML element(s) by CSS and XPath selectors before text conversion. </li>
|
||||
<li> Don't paste HTML here, use only CSS and XPath selectors </li>
|
||||
<li> Add multiple elements, CSS or XPath selectors per line to ignore multiple parts of the HTML. </li>
|
||||
</ul>
|
||||
</span>
|
||||
</fieldset>
|
||||
@@ -170,11 +181,11 @@ nav
|
||||
<span class="pure-form-message-inline">Note: This is applied globally in addition to the per-watch rules.</span><br>
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Matching text will be <strong>ignored</strong> in the text snapshot (you can still see it but it won't trigger a change)</li>
|
||||
<li>Note: This is applied globally in addition to the per-watch rules.</li>
|
||||
<li>Each line processed separately, any line matching will be ignored (removed before creating the checksum)</li>
|
||||
<li>Regular Expression support, wrap the entire line in forward slash <code>/regex/</code></li>
|
||||
<li>Changing this will affect the comparison checksum which may trigger an alert</li>
|
||||
<li>Use the preview/show current tab to see ignores</li>
|
||||
</ul>
|
||||
</span>
|
||||
</fieldset>
|
||||
@@ -209,6 +220,23 @@ nav
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="tab-pane-inner" id="timedate">
|
||||
<div class="pure-control-group">
|
||||
Ensure the settings below are correct; they are used to manage the time schedule for checking your web page watches.
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
<p><strong>UTC Time & Date from Server:</strong> <span id="utc-time" >{{ utc_time }}</span></p>
|
||||
<p><strong>Local Time & Date in Browser:</strong> <span class="local-time" data-utc="{{ utc_time }}"></span></p>
|
||||
<p>
|
||||
{{ render_field(form.application.form.timezone) }}
|
||||
<datalist id="timezones" style="display: none;">
|
||||
{% for tz_name in available_timezones %}
|
||||
<option value="{{ tz_name }}">{{ tz_name }}</option>
|
||||
{% endfor %}
|
||||
</datalist>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
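For reference, a minimal sketch of how the available_timezones list consumed by this datalist could be produced (the project may filter or sort it differently):

from zoneinfo import available_timezones

available = sorted(available_timezones())   # e.g. 'Europe/Berlin', 'UTC', ...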
<div class="tab-pane-inner" id="proxies">
|
||||
<div id="recommended-proxy">
|
||||
<div>
|
||||
@@ -274,7 +302,7 @@ nav
|
||||
<div class="pure-control-group">
|
||||
{{ render_button(form.save_button) }}
|
||||
<a href="{{url_for('index')}}" class="pure-button button-small button-cancel">Back</a>
|
||||
<a href="{{url_for('clear_all_history')}}" class="pure-button button-small button-cancel">Clear Snapshot History</a>
|
||||
<a href="{{url_for('clear_all_history')}}" class="pure-button button-small button-error">Clear Snapshot History</a>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
|
||||
<div class="box">
|
||||
|
||||
<form class="pure-form" action="{{ url_for('form_quick_watch_add') }}" method="POST" id="new-watch-form">
|
||||
<form class="pure-form" action="{{ url_for('form_quick_watch_add', tag=active_tag_uuid) }}" method="POST" id="new-watch-form">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
|
||||
<fieldset>
|
||||
<legend>Add a new change detection watch</legend>
|
||||
@@ -59,6 +59,11 @@
|
||||
{% set sort_order = sort_order or 'asc' %}
|
||||
{% set sort_attribute = sort_attribute or 'last_changed' %}
|
||||
{% set pagination_page = request.args.get('page', 0) %}
|
||||
{% set cols_required = 6 %}
|
||||
{% set any_has_restock_price_processor = datastore.any_watches_have_processor_by_name("restock_diff") %}
|
||||
{% if any_has_restock_price_processor %}
|
||||
{% set cols_required = cols_required + 1 %}
|
||||
{% endif %}
|
||||
|
||||
<div id="watch-table-wrapper">
|
||||
|
||||
@@ -70,15 +75,18 @@
|
||||
<th><input style="vertical-align: middle" type="checkbox" id="check-all" > <a class="{{ 'active '+link_order if sort_attribute == 'date_created' else 'inactive' }}" href="{{url_for('index', sort='date_created', order=link_order, tag=active_tag_uuid)}}"># <span class='arrow {{link_order}}'></span></a></th>
|
||||
<th class="empty-cell"></th>
|
||||
<th><a class="{{ 'active '+link_order if sort_attribute == 'label' else 'inactive' }}" href="{{url_for('index', sort='label', order=link_order, tag=active_tag_uuid)}}">Website <span class='arrow {{link_order}}'></span></a></th>
|
||||
<th><a class="{{ 'active '+link_order if sort_attribute == 'last_checked' else 'inactive' }}" href="{{url_for('index', sort='last_checked', order=link_order, tag=active_tag_uuid)}}">Last Checked <span class='arrow {{link_order}}'></span></a></th>
|
||||
<th><a class="{{ 'active '+link_order if sort_attribute == 'last_changed' else 'inactive' }}" href="{{url_for('index', sort='last_changed', order=link_order, tag=active_tag_uuid)}}">Last Changed <span class='arrow {{link_order}}'></span></a></th>
|
||||
{% if any_has_restock_price_processor %}
|
||||
<th>Restock & Price</th>
|
||||
{% endif %}
|
||||
<th><a class="{{ 'active '+link_order if sort_attribute == 'last_checked' else 'inactive' }}" href="{{url_for('index', sort='last_checked', order=link_order, tag=active_tag_uuid)}}"><span class="hide-on-mobile">Last</span> Checked <span class='arrow {{link_order}}'></span></a></th>
|
||||
<th><a class="{{ 'active '+link_order if sort_attribute == 'last_changed' else 'inactive' }}" href="{{url_for('index', sort='last_changed', order=link_order, tag=active_tag_uuid)}}"><span class="hide-on-mobile">Last</span> Changed <span class='arrow {{link_order}}'></span></a></th>
|
||||
<th class="empty-cell"></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% if not watches|length %}
|
||||
<tr>
|
||||
<td colspan="6" style="text-wrap: wrap;">No website watches configured, please add a URL in the box above, or <a href="{{ url_for('import_page')}}" >import a list</a>.</td>
|
||||
<td colspan="{{ cols_required }}" style="text-wrap: wrap;">No website watches configured, please add a URL in the box above, or <a href="{{ url_for('import_page')}}" >import a list</a>.</td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
{% for watch in (watches|sort(attribute=sort_attribute, reverse=sort_order == 'asc'))|pagination_slice(skip=pagination.skip) %}
|
||||
@@ -91,6 +99,7 @@
|
||||
{% if watch.last_notification_error is defined and watch.last_notification_error != False %}error{% endif %}
|
||||
{% if watch.paused is defined and watch.paused != False %}paused{% endif %}
|
||||
{% if is_unviewed %}unviewed{% endif %}
|
||||
{% if watch.has_restock_info %} has-restock-info {% if watch['restock']['in_stock'] %}in-stock{% else %}not-in-stock{% endif %} {% else %}no-restock-info{% endif %}
|
||||
{% if watch.uuid in queued_uuids %}queued{% endif %}">
|
||||
<td class="inline checkbox-uuid" ><input name="uuids" type="checkbox" value="{{ watch.uuid}} " > <span>{{ loop.index+pagination.skip }}</span></td>
|
||||
<td class="inline watch-controls">
|
||||
@@ -135,30 +144,39 @@
|
||||
|
||||
{% if watch['processor'] == 'text_json_diff' %}
|
||||
{% if watch['has_ldjson_price_data'] and not watch['track_ldjson_price_data'] %}
|
||||
<div class="ldjson-price-track-offer">Embedded price data detected, follow only price data? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
|
||||
<div class="ldjson-price-track-offer">Switch to Restock & Price watch mode? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
|
||||
{% endif %}
|
||||
{% if watch['track_ldjson_price_data'] == 'accepted' %}
|
||||
{% endif %}
|
||||
{% if watch['processor'] == 'restock_diff' %}
|
||||
<span class="tracking-ldjson-price-data" title="Automatically following embedded price information"><img src="{{url_for('static_content', group='images', filename='price-tag-icon.svg')}}" class="status-icon price-follow-tag-icon" > Price</span>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
{% if watch['processor'] == 'restock_diff' %}
|
||||
<span class="restock-label {{'in-stock' if watch['in_stock'] else 'not-in-stock' }}" title="detecting restock conditions">
|
||||
<!-- maybe some object watch['processor'][restock_diff] or.. -->
|
||||
{% if watch['last_checked'] and watch['in_stock'] != None %}
|
||||
{% if watch['in_stock'] %} In stock {% else %} Not in stock {% endif %}
|
||||
{% else %}
|
||||
Not yet checked
|
||||
{% endif %}
|
||||
</span>
|
||||
{% endif %}
|
||||
|
||||
|
||||
{% for watch_tag_uuid, watch_tag in datastore.get_all_tags_for_watch(watch['uuid']).items() %}
|
||||
<span class="watch-tag-list">{{ watch_tag.title }}</span>
|
||||
{% endfor %}
|
||||
|
||||
</td>
|
||||
<!-- @todo make it so any watch handler obj can expose this --->
|
||||
{% if any_has_restock_price_processor %}
|
||||
<td class="restock-and-price">
|
||||
{% if watch['processor'] == 'restock_diff' %}
|
||||
{% if watch.has_restock_info %}
|
||||
<span class="restock-label {{'in-stock' if watch['restock']['in_stock'] else 'not-in-stock' }}" title="Detecting restock and price">
|
||||
<!-- maybe some object watch['processor'][restock_diff] or.. -->
|
||||
{% if watch['restock']['in_stock'] %} In stock {% else %} Not in stock {% endif %}
|
||||
</span>
|
||||
{% endif %}
|
||||
|
||||
{% if watch.get('restock') and watch['restock']['price'] != None %}
|
||||
{% if watch['restock']['price'] != None %}
|
||||
<span class="restock-label price" title="Price">
|
||||
{{ watch['restock']['price']|format_number_locale }} {{ watch['restock']['currency'] }}
|
||||
</span>
|
||||
{% endif %}
|
||||
{% elif not watch.has_restock_info %}
|
||||
<span class="restock-label error">No information</span>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</td>
|
||||
{% endif %}
|
||||
<td class="last-checked" data-timestamp="{{ watch.last_checked }}">{{watch|format_last_checked_time|safe}}</td>
|
||||
<td class="last-changed" data-timestamp="{{ watch.last_changed }}">{% if watch.history_n >=2 and watch.last_changed >0 %}
|
||||
{{watch.last_changed|format_timestamp_timeago}}
|
||||
@@ -169,13 +187,13 @@
|
||||
<td>
|
||||
<a {% if watch.uuid in queued_uuids %}disabled="true"{% endif %} href="{{ url_for('form_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}"
|
||||
class="recheck pure-button pure-button-primary">{% if watch.uuid in queued_uuids %}Queued{% else %}Recheck{% endif %}</a>
|
||||
<a href="{{ url_for('edit_page', uuid=watch.uuid)}}#general" class="pure-button pure-button-primary">Edit</a>
|
||||
<a href="{{ url_for('edit_page', uuid=watch.uuid, tag=active_tag_uuid)}}#general" class="pure-button pure-button-primary">Edit</a>
|
||||
{% if watch.history_n >= 2 %}
|
||||
|
||||
{% if is_unviewed %}
|
||||
<a href="{{ url_for('diff_history_page', uuid=watch.uuid, from_version=watch.get_next_snapshot_key_to_last_viewed) }}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a>
|
||||
<a href="{{ url_for('diff_history_page', uuid=watch.uuid, from_version=watch.get_next_snapshot_key_to_last_viewed) }}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">History</a>
|
||||
{% else %}
|
||||
<a href="{{ url_for('diff_history_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a>
|
||||
<a href="{{ url_for('diff_history_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">History</a>
|
||||
{% endif %}
|
||||
|
||||
{% else %}
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
#!/usr/bin/python3
|
||||
#!/usr/bin/env python3
|
||||
import resource
|
||||
import time
|
||||
from threading import Thread
|
||||
|
||||
import pytest
|
||||
from changedetectionio import changedetection_app
|
||||
@@ -23,6 +26,36 @@ def reportlog(pytestconfig):
|
||||
yield
|
||||
logger.remove(handler_id)
|
||||
|
||||
|
||||
def track_memory(memory_usage):
|
||||
while not memory_usage["stop"]:
|
||||
max_rss = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
|
||||
memory_usage["peak"] = max(memory_usage["peak"], max_rss)
|
||||
time.sleep(0.01) # Adjust the sleep time as needed
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def measure_memory_usage(request):
|
||||
memory_usage = {"peak": 0, "stop": False}
|
||||
tracker_thread = Thread(target=track_memory, args=(memory_usage,))
|
||||
tracker_thread.start()
|
||||
|
||||
yield
|
||||
|
||||
memory_usage["stop"] = True
|
||||
tracker_thread.join()
|
||||
|
||||
# Note: ru_maxrss is in kilobytes on Unix-based systems
|
||||
max_memory_used = memory_usage["peak"] / 1024 # Convert to MB
|
||||
s = f"Peak memory used by the test {request.node.fspath} - '{request.node.name}': {max_memory_used:.2f} MB"
|
||||
logger.debug(s)
|
||||
|
||||
with open("test-memory.log", 'a') as f:
|
||||
f.write(f"{s}\n")
|
||||
|
||||
# Assert that the memory usage is less than 200MB
|
||||
# assert max_memory_used < 150, f"Memory usage exceeded 200MB: {max_memory_used:.2f} MB"
|
||||
|
||||
|
||||
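A companion note to the fixture above: resource.getrusage(...).ru_maxrss is reported in kilobytes on Linux but in bytes on macOS, so a cross-platform variant would normalise the units first, roughly like this (illustrative sketch):

import resource
import sys

def peak_rss_mb():
    rss = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    divisor = 1024 * 1024 if sys.platform == 'darwin' else 1024
    return rss / divisor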
def cleanup(datastore_path):
|
||||
import glob
|
||||
# Unlink test output files
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# !/usr/bin/python3
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
|
||||
from flask import url_for
|
||||
@@ -77,13 +77,13 @@ def do_test(client, live_server, make_test_use_extra_browser=False):
|
||||
|
||||
|
||||
# Requires playwright to be installed
|
||||
def test_request_via_custom_browser_url(client, live_server):
|
||||
def test_request_via_custom_browser_url(client, live_server, measure_memory_usage):
|
||||
live_server_setup(live_server)
|
||||
# We do this so we can grep the logs of the custom container and see if the request actually went through that container
|
||||
do_test(client, live_server, make_test_use_extra_browser=True)
|
||||
|
||||
|
||||
def test_request_not_via_custom_browser_url(client, live_server):
|
||||
def test_request_not_via_custom_browser_url(client, live_server, measure_memory_usage):
|
||||
live_server_setup(live_server)
|
||||
# We do this so we can grep the logs of the custom container and see if the request actually went through that container
|
||||
do_test(client, live_server, make_test_use_extra_browser=False)
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
#!/usr/bin/python3
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from .. import conftest
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/python3
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
@@ -6,7 +6,7 @@ from ..util import live_server_setup, wait_for_all_checks
|
||||
import logging
|
||||
|
||||
# Requires playwright to be installed
|
||||
def test_fetch_webdriver_content(client, live_server):
|
||||
def test_fetch_webdriver_content(client, live_server, measure_memory_usage):
|
||||
live_server_setup(live_server)
|
||||
|
||||
#####################
|
||||
|
||||
@@ -3,7 +3,7 @@ from flask import url_for
|
||||
from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
|
||||
|
||||
|
||||
def test_execute_custom_js(client, live_server):
|
||||
def test_execute_custom_js(client, live_server, measure_memory_usage):
|
||||
|
||||
live_server_setup(live_server)
|
||||
assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"
|
||||
|
||||
changedetectionio/tests/itemprop_test_examples/README.md (new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
# A list of real-world examples!
|
||||
|
||||
The price should always be 666.66 for our tests
|
||||
|
||||
see test_restock_itemprop.py::test_special_prop_examples
|
||||
|
||||
changedetectionio/tests/itemprop_test_examples/a.txt (new file, 25 lines)
@@ -0,0 +1,25 @@
|
||||
<div class="PriceSection PriceSection_PriceSection__Vx1_Q PriceSection_variantHuge__P9qxg PdpPriceSection"
|
||||
data-testid="price-section"
|
||||
data-optly-product-tile-price-section="true"><span
|
||||
class="PriceRange ProductPrice variant-huge" itemprop="offers"
|
||||
itemscope="" itemtype="http://schema.org/Offer"><div
|
||||
class="VisuallyHidden_VisuallyHidden__VBD83">$155.55</div><span
|
||||
aria-hidden="true" class="Price variant-huge" data-testid="price"
|
||||
itemprop="price"><sup class="sup" data-testid="price-symbol"
|
||||
itemprop="priceCurrency" content="AUD">$</sup><span
|
||||
class="dollars" data-testid="price-value" itemprop="price"
|
||||
content="155.55">155.55</span><span class="extras"><span class="sup"
|
||||
data-testid="price-sup"></span></span></span></span>
|
||||
</div>
|
||||
|
||||
<script type="application/ld+json">{
|
||||
"@type": "Product",
|
||||
"@context": "https://schema.org",
|
||||
"name": "test",
|
||||
"description": "test",
|
||||
"offers": {
|
||||
"@type": "Offer",
|
||||
"priceCurrency": "AUD",
|
||||
"price": 155.55
|
||||
},
|
||||
}</script>
|
||||
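This fixture marks the price both as microdata (itemprop="price" with a content attribute) and as JSON-LD. Purely as an illustration of what these examples exercise, and not the project's own parser (the class and function names below are invented for the sketch), the microdata price can be picked out with the standard library alone:

from html.parser import HTMLParser


class PricePeek(HTMLParser):
    def __init__(self):
        super().__init__()
        self.prices = []

    def handle_starttag(self, tag, attrs):
        a = dict(attrs)
        # Microdata convention used in a.txt: itemprop="price" with the value in content=""
        if a.get('itemprop') == 'price' and a.get('content'):
            self.prices.append(a['content'])


def peek_price(html_text):
    parser = PricePeek()
    parser.feed(html_text)
    return parser.prices[0] if parser.prices else None


if __name__ == "__main__":
    # Assumes it is run from the repository root
    with open("changedetectionio/tests/itemprop_test_examples/a.txt") as f:
        print(peek_price(f.read()))  # 155.55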
@@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3

from .. import conftest
@@ -1,11 +1,11 @@
#!/usr/bin/python3
#!/usr/bin/env python3

import os
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks


def test_preferred_proxy(client, live_server):
def test_preferred_proxy(client, live_server, measure_memory_usage):
    live_server_setup(live_server)
    url = "http://chosen.changedetection.io"
@@ -1,11 +1,11 @@
#!/usr/bin/python3
#!/usr/bin/env python3

import time
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client


def test_noproxy_option(client, live_server):
def test_noproxy_option(client, live_server, measure_memory_usage):
    live_server_setup(live_server)
    # Run by run_proxy_tests.sh
    # Call this URL, then scan the containers to confirm the request never went through them
@@ -1,11 +1,11 @@
#!/usr/bin/python3
#!/usr/bin/env python3

import time
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client

# just make a request, we will grep in the docker logs to see it actually got called
def test_check_basic_change_detection_functionality(client, live_server):
def test_check_basic_change_detection_functionality(client, live_server, measure_memory_usage):
    live_server_setup(live_server)
    res = client.post(
        url_for("import_page"),

@@ -16,4 +16,4 @@ def test_check_basic_change_detection_functionality(client, live_server):
    )

    assert b"1 Imported" in res.data
    time.sleep(3)
    wait_for_all_checks(client)
@@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3

import time
from flask import url_for

@@ -6,7 +6,7 @@ from ..util import live_server_setup, wait_for_all_checks
import os

# just make a request, we will grep in the docker logs to see it actually got called
def test_select_custom(client, live_server):
def test_select_custom(client, live_server, measure_memory_usage):
    live_server_setup(live_server)

    # Goto settings, add our custom one

@@ -44,7 +44,7 @@ def test_select_custom(client, live_server):
        follow_redirects=True
    )
    # We should see something via proxy
    assert b'<div class=""> - 0.' in res.data
    assert b' - 0.' in res.data

    #
    # Now we should see the request in the container logs for "squid-squid-custom" because it will be the only default
@@ -1,12 +1,27 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import json
import os
import time
from flask import url_for
from changedetectionio.tests.util import live_server_setup, wait_for_all_checks
from changedetectionio.tests.util import live_server_setup, wait_for_all_checks, extract_UUID_from_client


def test_socks5(client, live_server):
def set_response():
    import time
    data = f"""<html>
       <body>
         <h1>Awesome, you made it</h1>
         yeah the socks request worked
       </body>
    </html>
    """

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(data)
    time.sleep(1)

def test_socks5(client, live_server, measure_memory_usage):
    live_server_setup(live_server)
    set_response()

    # Setup a proxy
    res = client.post(

@@ -24,7 +39,10 @@ def test_socks5(client, live_server):

    assert b"Settings updated." in res.data

    test_url = "https://changedetection.io/CHANGELOG.txt?socks-test-tag=" + os.getenv('SOCKSTEST', '')
    # Because the socks server should connect back to us
    test_url = url_for('test_endpoint', _external=True) + f"?socks-test-tag={os.getenv('SOCKSTEST', '')}"
    test_url = test_url.replace('localhost.localdomain', 'cdio')
    test_url = test_url.replace('localhost', 'cdio')

    res = client.post(
        url_for("form_quick_watch_add"),

@@ -60,4 +78,25 @@ def test_socks5(client, live_server):
    )

    # Should see the proper string
    assert "+0200:".encode('utf-8') in res.data
    assert "Awesome, you made it".encode('utf-8') in res.data

    # PROXY CHECKER WIDGET CHECK - this needs more checking
    uuid = extract_UUID_from_client(client)

    res = client.get(
        url_for("check_proxies.start_check", uuid=uuid),
        follow_redirects=True
    )
    # It's probably already finished super fast :(
    #assert b"RUNNING" in res.data

    wait_for_all_checks(client)
    res = client.get(
        url_for("check_proxies.get_recheck_status", uuid=uuid),
        follow_redirects=True
    )
    assert b"OK" in res.data

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data
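As an aside on what this test exercises: once the watch is pointed at the SOCKS5 proxy, the fetch it performs is roughly equivalent to the sketch below. This is an illustration only; the hostnames, port and URL are placeholders, and it assumes the requests[socks] extra (PySocks) is installed.

import requests


def fetch_via_socks5(url, proxy_url='socks5://cdio-socks5:1080'):
    """Fetch a URL through a SOCKS5 proxy, roughly what the watch under test does."""
    proxies = {'http': proxy_url, 'https': proxy_url}
    return requests.get(url, proxies=proxies, timeout=10)


# The 'localhost' -> 'cdio' rewrite in the test exists because the SOCKS container must
# reach the test endpoint over the docker network rather than its own loopback interface.
# Example (placeholder URL): fetch_via_socks5('http://cdio:5005/endpoint')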
@@ -1,16 +1,32 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import os
import time
from flask import url_for
from changedetectionio.tests.util import live_server_setup, wait_for_all_checks


def set_response():
    import time
    data = f"""<html>
       <body>
         <h1>Awesome, you made it</h1>
         yeah the socks request worked
       </body>
    </html>
    """

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(data)
    time.sleep(1)

# should be proxies.json mounted from run_proxy_tests.sh already
# -v `pwd`/tests/proxy_socks5/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json
def test_socks5_from_proxiesjson_file(client, live_server):
def test_socks5_from_proxiesjson_file(client, live_server, measure_memory_usage):
    live_server_setup(live_server)

    test_url = "https://changedetection.io/CHANGELOG.txt?socks-test-tag=" + os.getenv('SOCKSTEST', '')
    set_response()
    # Because the socks server should connect back to us
    test_url = url_for('test_endpoint', _external=True) + f"?socks-test-tag={os.getenv('SOCKSTEST', '')}"
    test_url = test_url.replace('localhost.localdomain', 'cdio')
    test_url = test_url.replace('localhost', 'cdio')

    res = client.get(url_for("settings_page"))
    assert b'name="requests-proxy" type="radio" value="socks5proxy"' in res.data

@@ -49,4 +65,4 @@ def test_socks5_from_proxiesjson_file(client, live_server):
    )

    # Should see the proper string
    assert "+0200:".encode('utf-8') in res.data
    assert "Awesome, you made it".encode('utf-8') in res.data
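For context, the proxies.json that run_proxy_tests.sh mounts into test-datastore/ is what gives the proxy the "socks5proxy" key that the settings-page assertion above looks for. The snippet below only illustrates the general shape; the exact fields and the credentials/host in the URL are assumptions for this sketch, not a copy of proxies.json-example.

import json

# Assumed shape of a minimal SOCKS5 entry; the key name matches the radio value asserted above
proxies_config = {
    "socks5proxy": {
        "label": "socks5proxy",
        "url": "socks5://proxy_user123:proxy_pass123@socks5proxy:1080",  # placeholder credentials/host
    }
}

with open("test-datastore/proxies.json", "w") as f:
    json.dump(proxies_config, f, indent=2)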
@@ -1,3 +1,3 @@
#!/usr/bin/python3
#!/usr/bin/env python3

from .. import conftest
@@ -1,8 +1,8 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import os
import time
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client, wait_for_notification_endpoint_output
from changedetectionio.notification import (
    default_notification_body,
    default_notification_format,

@@ -48,7 +48,7 @@ def set_back_in_stock_response():
    return None

# Add a site in paused mode, add an invalid filter, we should still have visual selector data ready
def test_restock_detection(client, live_server):
def test_restock_detection(client, live_server, measure_memory_usage):

    set_original_response()
    #assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"

@@ -94,7 +94,7 @@ def test_restock_detection(client, live_server):
    assert b'not-in-stock' not in res.data

    # We should have a notification
    time.sleep(2)
    wait_for_notification_endpoint_output()
    assert os.path.isfile("test-datastore/notification.txt"), "Notification received"
    os.unlink("test-datastore/notification.txt")

@@ -103,6 +103,7 @@ def test_restock_detection(client, live_server):
    set_original_response()
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)
    time.sleep(5)
    assert not os.path.isfile("test-datastore/notification.txt"), "No notification should have fired when it went OUT OF STOCK by default"

    # BUT we should see that it correctly shows "not in stock"
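The wait_for_notification_endpoint_output helper newly imported above is not shown in this diff. A plausible shape for it, assuming it simply polls for the file the test notification endpoint writes (the path, timeout and poll interval here are guesses for this sketch):

import os
import time


def wait_for_notification_endpoint_output(path="test-datastore/notification.txt", timeout=30):
    """Poll until the notification endpoint has written its output file, or give up."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if os.path.isfile(path):
            return True
        time.sleep(0.5)
    return False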
@@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
import asyncio
from aiosmtpd.controller import Controller
from aiosmtpd.smtp import SMTP
Some files were not shown because too many files have changed in this diff.