Compare commits
	
		
			168 Commits
		
	
	
		
			socks5-tes
			...
			0.49.11
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | 108cdf84a5 | ||
|   | 8c6f6f1578 | ||
|   | df4ffaaff8 | ||
|   | d522c65e50 | ||
|   | c3b2a8b019 | ||
|   | 28d3151090 | ||
|   | 2a1c832f8d | ||
|   | 0170adb171 | ||
|   | cb62404b8c | ||
|   | 8f9c46bd3f | ||
|   | 97291ce6d0 | ||
|   | f689e5418e | ||
|   | f751f0b0ef | ||
|   | ea9ba3bb2e | ||
|   | c7ffebce2a | ||
|   | 54b7c070f7 | ||
|   | 6c1b687cd1 | ||
|   | e850540a91 | ||
|   | d4bc9dfc50 | ||
|   | f26ea55e9c | ||
|   | b53e1985ac | ||
|   | 302ef80d95 | ||
|   | 5b97c29714 | ||
|   | 64075c87ee | ||
|   | d58a71cffc | ||
|   | 036b006226 | ||
|   | f29f89d078 | ||
|   | 289f118581 | ||
|   | 10b2bbea83 | ||
|   | 32d110b92f | ||
|   | 860a5f5c1a | ||
|   | 70a18ee4b5 | ||
|   | 73189672c3 | ||
|   | 7e7d5dc383 | ||
|   | 1c2cfc37aa | ||
|   | 0634fe021d | ||
|   | 04934b6b3b | ||
|   | ff00417bc5 | ||
|   | 849c5b2293 | ||
|   | 4bf560256b | ||
|   | 7903b03a0c | ||
|   | 5e7c0880c1 | ||
|   | 957aef4ff3 | ||
|   | 8e9a83d8f4 | ||
|   | 5961838143 | ||
|   | 8cf4a8128b | ||
|   | 24c3bfe5ad | ||
|   | bdd9760f3c | ||
|   | e37467f649 | ||
|   | d42fdf0257 | ||
|   | 939fa86582 | ||
|   | b87c92b9e0 | ||
|   | 4d5535d72c | ||
|   | ad08219d03 | ||
|   | 82211eef82 | ||
|   | 5d9380609c | ||
|   | a8b3918fca | ||
|   | e83fb37fb6 | ||
|   | 6b99afe0f7 | ||
|   | 09ebc6ec63 | ||
|   | 6b1065502e | ||
|   | d4c470984a | ||
|   | 55da48f719 | ||
|   | dbd4adf23a | ||
|   | b1e700b3ff | ||
|   | 1c61b5a623 | ||
|   | e799a1cdcb | ||
|   | 938065db6f | ||
|   | 4f2d38ff49 | ||
|   | 8960f401b7 | ||
|   | 1c1f1c6f6b | ||
|   | a2a98811a5 | ||
|   | 5a0ef8fc01 | ||
|   | d90de0851d | ||
|   | 360b4f0d8b | ||
|   | 6fc04d7f1c | ||
|   | 66fb05527b | ||
|   | 202e47d728 | ||
|   | d67d396b88 | ||
|   | 05f54f0ce6 | ||
|   | 6adf10597e | ||
|   | 4419bc0e61 | ||
|   | f7e9846c9b | ||
|   | 5dea5e1def | ||
|   | 0fade0a473 | ||
|   | 121e9c20e0 | ||
|   | 12cec2d541 | ||
|   | d52e6e8e11 | ||
|   | bae1a89b75 | ||
|   | e49711f449 | ||
|   | a3a3ab0622 | ||
|   | c5fe188b28 | ||
|   | 1fb0adde54 | ||
|   | 2614b275f0 | ||
|   | 1631a55830 | ||
|   | f00b8e4efb | ||
|   | 179ca171d4 | ||
|   | 84f2870d4f | ||
|   | 7421e0f95e | ||
|   | c6162e48f1 | ||
|   | feccb18cdc | ||
|   | 1462ad89ac | ||
|   | cfb9fadec8 | ||
|   | d9f9fa735d | ||
|   | 6084b0f23d | ||
|   | 4e18aea5ff | ||
|   | fdba6b5566 | ||
|   | 4e6c783c45 | ||
|   | 0f0f5af7b5 | ||
|   | 7fcba26bea | ||
|   | 4bda1a234f | ||
|   | d297850539 | ||
|   | 751239250f | ||
|   | 6aceeb01ab | ||
|   | 49bc982c69 | ||
|   | e0abf0b505 | ||
|   | f08a1185aa | ||
|   | ad5d7efbbf | ||
|   | 7029d10f8b | ||
|   | 26d3a23e05 | ||
|   | 942625e1fb | ||
|   | 33c83230a6 | ||
|   | 87510becb5 | ||
|   | 5e95dc62a5 | ||
|   | 7d94535dbf | ||
|   | 563c196396 | ||
|   | e8b82c47ca | ||
|   | e84de7e8f4 | ||
|   | 1543edca24 | ||
|   | 82e0b99b07 | ||
|   | b0ff9d161e | ||
|   | c1dd681643 | ||
|   | ecafa27833 | ||
|   | f7d4e58613 | ||
|   | 5bb47e47db | ||
|   | 03151da68e | ||
|   | a16a70229d | ||
|   | 9476c1076b | ||
|   | a4959b5971 | ||
|   | a278fa22f2 | ||
|   | d39530b261 | ||
|   | d4b4355ff5 | ||
|   | c1c8de3104 | ||
|   | 5a768d7db3 | ||
|   | f38429ec93 | ||
|   | 783926962d | ||
|   | 6cd1d50a4f | ||
|   | 54a4970a4c | ||
|   | fd00453e6d | ||
|   | 2842ffb205 | ||
|   | ec4e2f5649 | ||
|   | fe8e3d1cb1 | ||
|   | 69fbafbdb7 | ||
|   | f255165571 | ||
|   | 7ff34baa90 | ||
|   | 043378d09c | ||
|   | af4bafcff8 | ||
|   | b656338c63 | ||
|   | 97af190910 | ||
|   | e9e063e18e | ||
|   | 45c444d0db | ||
|   | 00458b95c4 | ||
|   | dad9760832 | ||
|   | e2c2a76cb2 | ||
|   | 5b34aece96 | ||
|   | 1b625dc18a | ||
|   | 367afc81e9 | ||
|   | ddfbef6db3 | 
| @@ -1,18 +1,31 @@ | ||||
| .git | ||||
| .github | ||||
| changedetectionio/processors/__pycache__ | ||||
| changedetectionio/api/__pycache__ | ||||
| changedetectionio/model/__pycache__ | ||||
| changedetectionio/blueprint/price_data_follower/__pycache__ | ||||
| changedetectionio/blueprint/tags/__pycache__ | ||||
| changedetectionio/blueprint/__pycache__ | ||||
| changedetectionio/blueprint/browser_steps/__pycache__ | ||||
| changedetectionio/fetchers/__pycache__ | ||||
| changedetectionio/tests/visualselector/__pycache__ | ||||
| changedetectionio/tests/restock/__pycache__ | ||||
| changedetectionio/tests/__pycache__ | ||||
| changedetectionio/tests/fetchers/__pycache__ | ||||
| changedetectionio/tests/unit/__pycache__ | ||||
| changedetectionio/tests/proxy_list/__pycache__ | ||||
| changedetectionio/__pycache__ | ||||
| # Git | ||||
| .git/ | ||||
| .gitignore | ||||
|  | ||||
| # GitHub | ||||
| .github/ | ||||
|  | ||||
| # Byte-compiled / optimized / DLL files | ||||
| **/__pycache__ | ||||
| **/*.py[cod] | ||||
|  | ||||
| # Caches | ||||
| .mypy_cache/ | ||||
| .pytest_cache/ | ||||
| .ruff_cache/ | ||||
|  | ||||
| # Distribution / packaging | ||||
| build/ | ||||
| dist/ | ||||
| *.egg-info* | ||||
|  | ||||
| # Virtual environment | ||||
| .env | ||||
| .venv/ | ||||
| venv/ | ||||
|  | ||||
| # IntelliJ IDEA | ||||
| .idea/ | ||||
|  | ||||
| # Visual Studio | ||||
| .vscode/ | ||||
|   | ||||
							
								
								
									
										4
									
								
								.github/ISSUE_TEMPLATE/bug_report.md
									
									
									
									
										vendored
									
									
								
							
							
						
						| @@ -27,6 +27,10 @@ A clear and concise description of what the bug is. | ||||
| **Version** | ||||
| *Exact version* in the top right area: 0.... | ||||
|  | ||||
| **How did you install?** | ||||
|  | ||||
| Docker, Pip, from source directly etc | ||||
|  | ||||
| **To Reproduce** | ||||
|  | ||||
| Steps to reproduce the behavior: | ||||
|   | ||||
							
								
								
									
										23
									
								
								.github/test/Dockerfile-alpine
									
									
									
									
										vendored
									
									
								
							
							
						
						| @@ -2,32 +2,33 @@ | ||||
| # Test that we can still build on Alpine (musl modified libc https://musl.libc.org/) | ||||
| # Some packages wont install via pypi because they dont have a wheel available under this architecture. | ||||
|  | ||||
| FROM ghcr.io/linuxserver/baseimage-alpine:3.18 | ||||
| FROM ghcr.io/linuxserver/baseimage-alpine:3.21 | ||||
| ENV PYTHONUNBUFFERED=1 | ||||
|  | ||||
| COPY requirements.txt /requirements.txt | ||||
|  | ||||
| RUN \ | ||||
|   apk add --update --no-cache --virtual=build-dependencies \ | ||||
|  apk add --update --no-cache --virtual=build-dependencies \ | ||||
|     build-base \ | ||||
|     cargo \ | ||||
|     g++ \ | ||||
|     gcc \ | ||||
|     git \ | ||||
|     jpeg-dev \ | ||||
|     libc-dev \ | ||||
|     libffi-dev \ | ||||
|     libjpeg \ | ||||
|     libxslt-dev \ | ||||
|     make \ | ||||
|     openssl-dev \ | ||||
|     py3-wheel \ | ||||
|     python3-dev \ | ||||
|     zip \ | ||||
|     zlib-dev && \ | ||||
|   apk add --update --no-cache \ | ||||
|     libjpeg \ | ||||
|     libxslt \ | ||||
|     python3 \ | ||||
|     py3-pip && \ | ||||
|     nodejs \ | ||||
|     poppler-utils \ | ||||
|     python3 && \ | ||||
|   echo "**** pip3 install test of changedetection.io ****" && \ | ||||
|   pip3 install -U pip wheel setuptools && \ | ||||
|   pip3 install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.18/ -r /requirements.txt && \ | ||||
|   python3 -m venv /lsiopy  && \ | ||||
|   pip install -U pip wheel setuptools && \ | ||||
|   pip install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.21/ -r /requirements.txt && \ | ||||
|   apk del --purge \ | ||||
|     build-dependencies | ||||
|   | ||||
							
								
								
									
										19
									
								
								.github/workflows/containers.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						| @@ -103,6 +103,19 @@ jobs: | ||||
| #          provenance: false | ||||
|  | ||||
|       # A new tagged release is required, which builds :tag and :latest | ||||
|       - name: Docker meta :tag | ||||
|         if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.') | ||||
|         uses: docker/metadata-action@v5 | ||||
|         id: meta | ||||
|         with: | ||||
|             images: | | ||||
|                 ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io | ||||
|                 ghcr.io/dgtlmoon/changedetection.io | ||||
|             tags: | | ||||
|                 type=semver,pattern={{version}} | ||||
|                 type=semver,pattern={{major}}.{{minor}} | ||||
|                 type=semver,pattern={{major}} | ||||
|  | ||||
|       - name: Build and push :tag | ||||
|         id: docker_build_tag_release | ||||
|         if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.') | ||||
| @@ -111,11 +124,7 @@ jobs: | ||||
|           context: ./ | ||||
|           file: ./Dockerfile | ||||
|           push: true | ||||
|           tags: | | ||||
|             ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:${{ github.event.release.tag_name }} | ||||
|             ghcr.io/dgtlmoon/changedetection.io:${{ github.event.release.tag_name }} | ||||
|             ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest | ||||
|             ghcr.io/dgtlmoon/changedetection.io:latest | ||||
|           tags: ${{ steps.meta.outputs.tags }} | ||||
|           platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8 | ||||
|           cache-from: type=gha | ||||
|           cache-to: type=gha,mode=max | ||||
|   | ||||
							
								
								
									
										7
									
								
								.github/workflows/pypi-release.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						| @@ -45,9 +45,12 @@ jobs: | ||||
|     - name: Test that the basic pip built package runs without error | ||||
|       run: | | ||||
|         set -ex | ||||
|         sudo pip3 install --upgrade pip  | ||||
|         pip3 install dist/changedetection.io*.whl | ||||
|         ls -alR  | ||||
|          | ||||
|         # Find and install the first .whl file | ||||
|         find dist -type f -name "*.whl" -exec pip3 install {} \; -quit | ||||
|         changedetection.io -d /tmp -p 10000 & | ||||
|          | ||||
|         sleep 3 | ||||
|         curl --retry-connrefused --retry 6 http://127.0.0.1:10000/static/styles/pure-min.css >/dev/null | ||||
|         curl --retry-connrefused --retry 6 http://127.0.0.1:10000/ >/dev/null | ||||
|   | ||||
							
								
								
									
										8
									
								
								.github/workflows/test-only.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						| @@ -28,7 +28,6 @@ jobs: | ||||
|     uses: ./.github/workflows/test-stack-reusable-workflow.yml | ||||
|     with: | ||||
|       python-version: '3.11' | ||||
|       skip-pypuppeteer: true | ||||
|  | ||||
|   test-application-3-12: | ||||
|     needs: lint-code | ||||
| @@ -37,3 +36,10 @@ jobs: | ||||
|       python-version: '3.12' | ||||
|       skip-pypuppeteer: true | ||||
|  | ||||
|   test-application-3-13: | ||||
|     needs: lint-code | ||||
|     uses: ./.github/workflows/test-stack-reusable-workflow.yml | ||||
|     with: | ||||
|       python-version: '3.13' | ||||
|       skip-pypuppeteer: true | ||||
|        | ||||
|   | ||||
| @@ -7,7 +7,7 @@ on: | ||||
|         description: 'Python version to use' | ||||
|         required: true | ||||
|         type: string | ||||
|         default: '3.10' | ||||
|         default: '3.11' | ||||
|       skip-pypuppeteer: | ||||
|         description: 'Skip PyPuppeteer (not supported in 3.11/3.12)' | ||||
|         required: false | ||||
| @@ -64,14 +64,16 @@ jobs: | ||||
|           echo "Running processes in docker..." | ||||
|           docker ps | ||||
|  | ||||
|       - name: Test built container with Pytest (generally as requests/plaintext fetching) | ||||
|       - name: Run Unit Tests | ||||
|         run: | | ||||
|           # Unit tests | ||||
|           echo "run test with unittest" | ||||
|           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_notification_diff' | ||||
|           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_watch_model' | ||||
|           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_jinja2_security' | ||||
|            | ||||
|           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_semver' | ||||
|  | ||||
|       - name: Test built container with Pytest (generally as requests/plaintext fetching) | ||||
|         run: | | ||||
|           # All tests | ||||
|           echo "run test with pytest" | ||||
|           # The default pytest logger_level is TRACE | ||||
|   | ||||
							
								
								
									
										39
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						| @@ -1,14 +1,29 @@ | ||||
| __pycache__ | ||||
| .idea | ||||
| *.pyc | ||||
| datastore/url-watches.json | ||||
| datastore/* | ||||
| __pycache__ | ||||
| .pytest_cache | ||||
| build | ||||
| dist | ||||
| venv | ||||
| test-datastore/* | ||||
| test-datastore | ||||
| # Byte-compiled / optimized / DLL files | ||||
| **/__pycache__ | ||||
| **/*.py[cod] | ||||
|  | ||||
| # Caches | ||||
| .mypy_cache/ | ||||
| .pytest_cache/ | ||||
| .ruff_cache/ | ||||
|  | ||||
| # Distribution / packaging | ||||
| build/ | ||||
| dist/ | ||||
| *.egg-info* | ||||
|  | ||||
| # Virtual environment | ||||
| .env | ||||
| .venv/ | ||||
| venv/ | ||||
|  | ||||
| # IDEs | ||||
| .idea | ||||
| .vscode/settings.json | ||||
|  | ||||
| # Datastore files | ||||
| datastore/ | ||||
| test-datastore/ | ||||
|  | ||||
| # Memory consumption log | ||||
| test-memory.log | ||||
|   | ||||
| @@ -4,7 +4,7 @@ In any commercial activity involving 'Hosting' (as defined herein), whether in p | ||||
|  | ||||
| # Commercial License Agreement | ||||
|  | ||||
| This Commercial License Agreement ("Agreement") is entered into by and between Mr Morresi (the original creator of this software) here-in ("Licensor") and (your company or personal name) _____________ ("Licensee"). This Agreement sets forth the terms and conditions under which Licensor provides its software ("Software") and services to Licensee for the purpose of reselling the software either in part or full, as part of any commercial activity where the activity involves a third party. | ||||
| This Commercial License Agreement ("Agreement") is entered into by and between Web Technologies s.r.o. here-in ("Licensor") and (your company or personal name) _____________ ("Licensee"). This Agreement sets forth the terms and conditions under which Licensor provides its software ("Software") and services to Licensee for the purpose of reselling the software either in part or full, as part of any commercial activity where the activity involves a third party. | ||||
|  | ||||
| ### Definition of Hosting | ||||
|  | ||||
|   | ||||
| @@ -1,8 +1,5 @@ | ||||
| # pip dependencies install stage | ||||
|  | ||||
| # @NOTE! I would love to move to 3.11 but it breaks the async handler in changedetectionio/content_fetchers/puppeteer.py | ||||
| #        If you know how to fix it, please do! and test it for both 3.10 and 3.11 | ||||
|  | ||||
| ARG PYTHON_VERSION=3.11 | ||||
|  | ||||
| FROM python:${PYTHON_VERSION}-slim-bookworm AS builder | ||||
| @@ -32,11 +29,12 @@ RUN pip install --extra-index-url https://www.piwheels.org/simple  --target=/dep | ||||
| # Playwright is an alternative to Selenium | ||||
| # Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing | ||||
| # https://github.com/dgtlmoon/changedetection.io/pull/1067 also musl/alpine (not supported) | ||||
| RUN pip install --target=/dependencies playwright~=1.41.2 \ | ||||
| RUN pip install --target=/dependencies playwright~=1.48.0 \ | ||||
|     || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled." | ||||
|  | ||||
| # Final image stage | ||||
| FROM python:${PYTHON_VERSION}-slim-bookworm | ||||
| LABEL org.opencontainers.image.source="https://github.com/dgtlmoon/changedetection.io" | ||||
|  | ||||
| RUN apt-get update && apt-get install -y --no-install-recommends \ | ||||
|     libxslt1.1 \ | ||||
|   | ||||
| @@ -1,6 +1,8 @@ | ||||
| recursive-include changedetectionio/api * | ||||
| recursive-include changedetectionio/apprise_plugin * | ||||
| recursive-include changedetectionio/blueprint * | ||||
| recursive-include changedetectionio/content_fetchers * | ||||
| recursive-include changedetectionio/conditions * | ||||
| recursive-include changedetectionio/model * | ||||
| recursive-include changedetectionio/processors * | ||||
| recursive-include changedetectionio/static * | ||||
|   | ||||
							
								
								
									
										13
									
								
								README.md
									
									
									
									
									
								
							
							
						
						| @@ -89,7 +89,7 @@ _Need an actual Chrome runner with Javascript support? We support fetching via W | ||||
| #### Key Features | ||||
|  | ||||
| - Lots of trigger filters, such as "Trigger on text", "Remove text by selector", "Ignore text", "Extract text", also using regular-expressions! | ||||
| - Target elements with xPath(1.0) and CSS Selectors, Easily monitor complex JSON with JSONPath or jq | ||||
| - Target elements with xPath 1 and xPath 2, CSS Selectors, Easily monitor complex JSON with JSONPath or jq | ||||
| - Switch between fast non-JS and Chrome JS based "fetchers" | ||||
| - Track changes in PDF files (Monitor text changed in the PDF, Also monitor PDF filesize and checksums) | ||||
| - Easily specify how often a site should be checked | ||||
| @@ -105,13 +105,22 @@ We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) glob | ||||
|  | ||||
| Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/ | ||||
|  | ||||
| ### Schedule web page watches in any timezone, limit by day of week and time. | ||||
|  | ||||
| Easily set a re-check schedule, for example you could limit the web page change detection to only operate during business hours. | ||||
| Or perhaps based on a foreign timezone (for example, you want to check for the latest news-headlines in a foreign country at 0900 AM), | ||||
|  | ||||
| <img src="./docs/scheduler.png" style="max-width:80%;" alt="How to monitor web page changes according to a schedule"  title="How to monitor web page changes according to a schedule"  /> | ||||
|  | ||||
| Includes quick short-cut buttons to setup a schedule for **business hours only**, or **weekends**. | ||||
|  | ||||
| ### We have a Chrome extension! | ||||
|  | ||||
| Easily add the current web page to your changedetection.io tool, simply install the extension and click "Sync" to connect it to your existing changedetection.io install. | ||||
|  | ||||
| [<img src="./docs/chrome-extension-screenshot.png" style="max-width:80%;" alt="Chrome Extension to easily add the current web-page to detect a change."  title="Chrome Extension to easily add the current web-page to detect a change."  />](https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop) | ||||
|  | ||||
| [Goto the Chrome Webstore to download the extension.](https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop) | ||||
| [Goto the Chrome Webstore to download the extension.](https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop) ( Or check out the [GitHub repo](https://github.com/dgtlmoon/changedetection.io-browser-extension) )  | ||||
|  | ||||
| ## Installation | ||||
|  | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
| # Read more https://github.com/dgtlmoon/changedetection.io/wiki | ||||
|  | ||||
| __version__ = '0.46.04' | ||||
| __version__ = '0.49.11' | ||||
|  | ||||
| from changedetectionio.strtobool import strtobool | ||||
| from json.decoder import JSONDecodeError | ||||
| @@ -11,6 +11,7 @@ os.environ['EVENTLET_NO_GREENDNS'] = 'yes' | ||||
| import eventlet | ||||
| import eventlet.wsgi | ||||
| import getopt | ||||
| import platform | ||||
| import signal | ||||
| import socket | ||||
| import sys | ||||
| @@ -19,15 +20,15 @@ from changedetectionio import store | ||||
| from changedetectionio.flask_app import changedetection_app | ||||
| from loguru import logger | ||||
|  | ||||
|  | ||||
| # Only global so we can access it in the signal handler | ||||
| app = None | ||||
| datastore = None | ||||
|  | ||||
| def get_version(): | ||||
|     return __version__ | ||||
|  | ||||
| # Parent wrapper or OS sends us a SIGTERM/SIGINT, do everything required for a clean shutdown | ||||
| def sigshutdown_handler(_signo, _stack_frame): | ||||
|     global app | ||||
|     global datastore | ||||
|     name = signal.Signals(_signo).name | ||||
|     logger.critical(f'Shutdown: Got Signal - {name} ({_signo}), Saving DB to disk and calling shutdown') | ||||
|     datastore.sync_to_json() | ||||
| @@ -144,6 +145,19 @@ def main(): | ||||
|  | ||||
|     signal.signal(signal.SIGTERM, sigshutdown_handler) | ||||
|     signal.signal(signal.SIGINT, sigshutdown_handler) | ||||
|      | ||||
|     # Custom signal handler for memory cleanup | ||||
|     def sigusr_clean_handler(_signo, _stack_frame): | ||||
|         from changedetectionio.gc_cleanup import memory_cleanup | ||||
|         logger.info('SIGUSR1 received: Running memory cleanup') | ||||
|         return memory_cleanup(app) | ||||
|  | ||||
|     # Register the SIGUSR1 signal handler | ||||
|     # Only register the signal handler if running on Linux | ||||
|     if platform.system() == "Linux": | ||||
|         signal.signal(signal.SIGUSR1, sigusr_clean_handler) | ||||
|     else: | ||||
|         logger.info("SIGUSR1 handler only registered on Linux, skipped.") | ||||
|  | ||||
|     # Go into cleanup mode | ||||
|     if do_cleanup: | ||||
| @@ -160,11 +174,10 @@ def main(): | ||||
|                     ) | ||||
|  | ||||
|     # Monitored websites will not receive a Referer header when a user clicks on an outgoing link. | ||||
|     # @Note: Incompatible with password login (and maybe other features) for now, submit a PR! | ||||
|     @app.after_request | ||||
|     def hide_referrer(response): | ||||
|         if strtobool(os.getenv("HIDE_REFERER", 'false')): | ||||
|             response.headers["Referrer-Policy"] = "no-referrer" | ||||
|             response.headers["Referrer-Policy"] = "same-origin" | ||||
|  | ||||
|         return response | ||||
|  | ||||
|   | ||||
							
								
								
									
										62
									
								
								changedetectionio/api/Import.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,62 @@ | ||||
| import os | ||||
| from changedetectionio.strtobool import strtobool | ||||
| from flask_restful import abort, Resource | ||||
| from flask import request | ||||
| import validators | ||||
| from . import auth | ||||
|  | ||||
|  | ||||
| class Import(Resource): | ||||
|     def __init__(self, **kwargs): | ||||
|         # datastore is a black box dependency | ||||
|         self.datastore = kwargs['datastore'] | ||||
|  | ||||
|     @auth.check_token | ||||
|     def post(self): | ||||
|         """ | ||||
|         @api {post} /api/v1/import Import a list of watched URLs | ||||
|         @apiDescription Accepts a line-feed separated list of URLs to import, additionally with ?tag_uuids=(tag  id), ?tag=(name), ?proxy={key}, ?dedupe=true (default true) one URL per line. | ||||
|         @apiExample {curl} Example usage: | ||||
|             curl http://localhost:5000/api/v1/import --data-binary @list-of-sites.txt -H"x-api-key:8a111a21bc2f8f1dd9b9353bbd46049a" | ||||
|         @apiName Import | ||||
|         @apiGroup Watch | ||||
|         @apiSuccess (200) {List} OK List of watch UUIDs added | ||||
|         @apiSuccess (500) {String} ERR Some other error | ||||
|         """ | ||||
|  | ||||
|         extras = {} | ||||
|  | ||||
|         if request.args.get('proxy'): | ||||
|             plist = self.datastore.proxy_list | ||||
|             if not request.args.get('proxy') in plist: | ||||
|                 return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400 | ||||
|             else: | ||||
|                 extras['proxy'] = request.args.get('proxy') | ||||
|  | ||||
|         dedupe = strtobool(request.args.get('dedupe', 'true')) | ||||
|  | ||||
|         tags = request.args.get('tag') | ||||
|         tag_uuids = request.args.get('tag_uuids') | ||||
|  | ||||
|         if tag_uuids: | ||||
|             tag_uuids = tag_uuids.split(',') | ||||
|  | ||||
|         urls = request.get_data().decode('utf8').splitlines() | ||||
|         added = [] | ||||
|         allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False')) | ||||
|         for url in urls: | ||||
|             url = url.strip() | ||||
|             if not len(url): | ||||
|                 continue | ||||
|  | ||||
|             # If hosts that only contain alphanumerics are allowed ("localhost" for example) | ||||
|             if not validators.url(url, simple_host=allow_simplehost): | ||||
|                 return f"Invalid or unsupported URL - {url}", 400 | ||||
|  | ||||
|             if dedupe and self.datastore.url_exists(url): | ||||
|                 continue | ||||
|  | ||||
|             new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags, tag_uuids=tag_uuids) | ||||
|             added.append(new_uuid) | ||||
|  | ||||
|         return added | ||||
							
								
								
									
										51
									
								
								changedetectionio/api/Search.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,51 @@ | ||||
| from flask_restful import Resource, abort | ||||
| from flask import request | ||||
| from . import auth | ||||
|  | ||||
| class Search(Resource): | ||||
|     def __init__(self, **kwargs): | ||||
|         # datastore is a black box dependency | ||||
|         self.datastore = kwargs['datastore'] | ||||
|  | ||||
|     @auth.check_token | ||||
|     def get(self): | ||||
|         """ | ||||
|         @api {get} /api/v1/search Search for watches | ||||
|         @apiDescription Search watches by URL or title text | ||||
|         @apiExample {curl} Example usage: | ||||
|             curl "http://localhost:5000/api/v1/search?q=https://example.com/page1" -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             curl "http://localhost:5000/api/v1/search?q=https://example.com/page1?tag=Favourites" -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             curl "http://localhost:5000/api/v1/search?q=https://example.com?partial=true" -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|         @apiName Search | ||||
|         @apiGroup Watch Management | ||||
|         @apiQuery {String} q Search query to match against watch URLs and titles | ||||
|         @apiQuery {String} [tag] Optional name of tag to limit results (name not UUID) | ||||
|         @apiQuery {String} [partial] Allow partial matching of URL query | ||||
|         @apiSuccess (200) {Object} JSON Object containing matched watches | ||||
|         """ | ||||
|         query = request.args.get('q', '').strip() | ||||
|         tag_limit = request.args.get('tag', '').strip() | ||||
|         from changedetectionio.strtobool import strtobool | ||||
|         partial = bool(strtobool(request.args.get('partial', '0'))) if 'partial' in request.args else False | ||||
|  | ||||
|         # Require a search query | ||||
|         if not query: | ||||
|             abort(400, message="Search query 'q' parameter is required") | ||||
|  | ||||
|         # Use the search function from the datastore | ||||
|         matching_uuids = self.datastore.search_watches_for_url(query=query, tag_limit=tag_limit, partial=partial) | ||||
|  | ||||
|         # Build the response with watch details | ||||
|         results = {} | ||||
|         for uuid in matching_uuids: | ||||
|             watch = self.datastore.data['watching'].get(uuid) | ||||
|             results[uuid] = { | ||||
|                 'last_changed': watch.last_changed, | ||||
|                 'last_checked': watch['last_checked'], | ||||
|                 'last_error': watch['last_error'], | ||||
|                 'title': watch['title'], | ||||
|                 'url': watch['url'], | ||||
|                 'viewed': watch.viewed | ||||
|             } | ||||
|  | ||||
|         return results, 200 | ||||
							
								
								
									
										54
									
								
								changedetectionio/api/SystemInfo.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,54 @@ | ||||
| from flask_restful import Resource | ||||
| from . import auth | ||||
|  | ||||
|  | ||||
| class SystemInfo(Resource): | ||||
|     def __init__(self, **kwargs): | ||||
|         # datastore is a black box dependency | ||||
|         self.datastore = kwargs['datastore'] | ||||
|         self.update_q = kwargs['update_q'] | ||||
|  | ||||
|     @auth.check_token | ||||
|     def get(self): | ||||
|         """ | ||||
|         @api {get} /api/v1/systeminfo Return system info | ||||
|         @apiDescription Return some info about the current system state | ||||
|         @apiExample {curl} Example usage: | ||||
|             curl http://localhost:5000/api/v1/systeminfo -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             HTTP/1.0 200 | ||||
|             { | ||||
|                 'queue_size': 10 , | ||||
|                 'overdue_watches': ["watch-uuid-list"], | ||||
|                 'uptime': 38344.55, | ||||
|                 'watch_count': 800, | ||||
|                 'version': "0.40.1" | ||||
|             } | ||||
|         @apiName Get Info | ||||
|         @apiGroup System Information | ||||
|         """ | ||||
|         import time | ||||
|         overdue_watches = [] | ||||
|  | ||||
|         # Check all watches and report which have not been checked but should have been | ||||
|  | ||||
|         for uuid, watch in self.datastore.data.get('watching', {}).items(): | ||||
|             # see if now - last_checked is greater than the time that should have been | ||||
|             # this is not super accurate (maybe they just edited it) but better than nothing | ||||
|             t = watch.threshold_seconds() | ||||
|             if not t: | ||||
|                 # Use the system wide default | ||||
|                 t = self.datastore.threshold_seconds | ||||
|  | ||||
|             time_since_check = time.time() - watch.get('last_checked') | ||||
|  | ||||
|             # Allow 5 minutes of grace time before we decide it's overdue | ||||
|             if time_since_check - (5 * 60) > t: | ||||
|                 overdue_watches.append(uuid) | ||||
|         from changedetectionio import __version__ as main_version | ||||
|         return { | ||||
|                    'queue_size': self.update_q.qsize(), | ||||
|                    'overdue_watches': overdue_watches, | ||||
|                    'uptime': round(time.time() - self.datastore.start_time, 2), | ||||
|                    'watch_count': len(self.datastore.data.get('watching', {})), | ||||
|                    'version': main_version | ||||
|                }, 200 | ||||
							
								
								
									
										156
									
								
								changedetectionio/api/Tags.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,156 @@ | ||||
| from flask_expects_json import expects_json | ||||
| from flask_restful import abort, Resource | ||||
| from flask import request | ||||
| from . import auth | ||||
|  | ||||
| # Import schemas from __init__.py | ||||
| from . import schema_tag, schema_create_tag, schema_update_tag | ||||
|  | ||||
|  | ||||
class Tag(Resource):
    """REST resource for a single tag: fetch, mute/unmute, update, delete."""

    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']

    # Get information about a single tag
    # curl http://localhost:5000/api/v1/tag/<string:uuid>
    @auth.check_token
    def get(self, uuid):
        """
        @api {get} /api/v1/tag/:uuid Single tag - get data or toggle notification muting.
        @apiDescription Retrieve tag information and set notification_muted status
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/tag/cc0cfffa-f449-477b-83ea-0caafd1dc091 -H"x-api-key:813031b16330fe25e3780cf0325daa45"
            curl "http://localhost:5000/api/v1/tag/cc0cfffa-f449-477b-83ea-0caafd1dc091?muted=muted" -H"x-api-key:813031b16330fe25e3780cf0325daa45"
        @apiName Tag
        @apiGroup Tag
        @apiParam {uuid} uuid Tag unique ID.
        @apiQuery {String} [muted] =`muted` or =`unmuted` , Sets the MUTE NOTIFICATIONS state
        @apiSuccess (200) {String} OK When muted operation OR full JSON object of the tag
        @apiSuccess (200) {JSON} TagJSON JSON Full JSON object of the tag
        """
        from copy import deepcopy
        # Deep-copy so the returned dict cannot leak mutations back into the datastore
        tag = deepcopy(self.datastore.data['settings']['application']['tags'].get(uuid))
        if not tag:
            abort(404, message=f'No tag exists with the UUID of {uuid}')

        # Mute toggling mutates the live datastore entry (not the copy above).
        # NOTE(review): unlike put(), this does not set needs_write_urgent —
        # confirm the change is persisted soon enough by the background writer.
        if request.args.get('muted', '') == 'muted':
            self.datastore.data['settings']['application']['tags'][uuid]['notification_muted'] = True
            return "OK", 200
        elif request.args.get('muted', '') == 'unmuted':
            self.datastore.data['settings']['application']['tags'][uuid]['notification_muted'] = False
            return "OK", 200

        return tag

    @auth.check_token
    def delete(self, uuid):
        """
        @api {delete} /api/v1/tag/:uuid Delete a tag and remove it from all watches
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/tag/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X DELETE -H"x-api-key:813031b16330fe25e3780cf0325daa45"
        @apiParam {uuid} uuid Tag unique ID.
        @apiName DeleteTag
        @apiGroup Tag
        @apiSuccess (200) {String} OK Was deleted
        """
        if not self.datastore.data['settings']['application']['tags'].get(uuid):
            abort(400, message='No tag exists with the UUID of {}'.format(uuid))

        # Delete the tag, and any tag reference
        del self.datastore.data['settings']['application']['tags'][uuid]
        
        # Remove tag from all watches so no watch keeps a dangling tag UUID
        for watch_uuid, watch in self.datastore.data['watching'].items():
            if watch.get('tags') and uuid in watch['tags']:
                watch['tags'].remove(uuid)

        return 'OK', 204

    @auth.check_token
    @expects_json(schema_update_tag)
    def put(self, uuid):
        """
        @api {put} /api/v1/tag/:uuid Update tag information
        @apiExample {curl} Example usage:
            Update (PUT)
            curl http://localhost:5000/api/v1/tag/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X PUT -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"title": "New Tag Title"}'

        @apiDescription Updates an existing tag using JSON
        @apiParam {uuid} uuid Tag unique ID.
        @apiName UpdateTag
        @apiGroup Tag
        @apiSuccess (200) {String} OK Was updated
        @apiSuccess (500) {String} ERR Some other error
        """
        tag = self.datastore.data['settings']['application']['tags'].get(uuid)
        if not tag:
            abort(404, message='No tag exists with the UUID of {}'.format(uuid))

        # Merge the validated JSON body into the stored tag and flush to disk promptly
        tag.update(request.json)
        self.datastore.needs_write_urgent = True

        return "OK", 200


    @auth.check_token
    # Only cares for {'title': 'xxxx'}
    def post(self):
        """
        @api {post} /api/v1/tag Create a single tag
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/tag -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"title": "Work related"}'
        @apiName Create
        @apiGroup Tag
        @apiSuccess (200) {String} OK Was created
        @apiSuccess (500) {String} ERR Some other error
        """

        json_data = request.get_json()
        title = json_data.get("title",'').strip()


        # add_tag returns the new UUID, or a falsy value when the title is rejected
        new_uuid = self.datastore.add_tag(title=title)
        if new_uuid:
            return {'uuid': new_uuid}, 201
        else:
            return "Invalid or unsupported tag", 400
|  | ||||
class Tags(Resource):
    """REST resource listing every configured tag."""

    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']

    @auth.check_token
    def get(self):
        """
        @api {get} /api/v1/tags List tags
        @apiDescription Return list of available tags
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/tags -H"x-api-key:813031b16330fe25e3780cf0325daa45"
            {
                "cc0cfffa-f449-477b-83ea-0caafd1dc091": {
                    "title": "Tech News",
                    "notification_muted": false,
                    "date_created": 1677103794
                },
                "e6f5fd5c-dbfe-468b-b8f3-f9d6ff5ad69b": {
                    "title": "Shopping",
                    "notification_muted": true,
                    "date_created": 1676662819
                }
            }
        @apiName ListTags
        @apiGroup Tag Management
        @apiSuccess (200) {String} OK JSON dict
        """
        all_tags = self.datastore.data['settings']['application']['tags']
        # Project each stored tag down to the public summary fields only
        summary = {
            tag_uuid: {
                'date_created': tag.get('date_created', 0),
                'notification_muted': tag.get('notification_muted', False),
                'title': tag.get('title', ''),
                'uuid': tag.get('uuid')
            }
            for tag_uuid, tag in all_tags.items()
        }
        return summary, 200
| @@ -9,20 +9,9 @@ import validators | ||||
| from . import auth | ||||
| import copy | ||||
| 
 | ||||
| # See docs/README.md for rebuilding the docs/apidoc information | ||||
| # Import schemas from __init__.py | ||||
| from . import schema, schema_create_watch, schema_update_watch | ||||
| 
 | ||||
| from . import api_schema | ||||
| from ..model import watch_base | ||||
| 
 | ||||
| # Build a JSON Schema atleast partially based on our Watch model | ||||
| watch_base_config = watch_base() | ||||
| schema = api_schema.build_watch_json_schema(watch_base_config) | ||||
| 
 | ||||
| schema_create_watch = copy.deepcopy(schema) | ||||
| schema_create_watch['required'] = ['url'] | ||||
| 
 | ||||
| schema_update_watch = copy.deepcopy(schema) | ||||
| schema_update_watch['additionalProperties'] = False | ||||
| 
 | ||||
| class Watch(Resource): | ||||
|     def __init__(self, **kwargs): | ||||
| @@ -58,7 +47,7 @@ class Watch(Resource): | ||||
|             abort(404, message='No watch exists with the UUID of {}'.format(uuid)) | ||||
| 
 | ||||
|         if request.args.get('recheck'): | ||||
|             self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True})) | ||||
|             self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|             return "OK", 200 | ||||
|         if request.args.get('paused', '') == 'paused': | ||||
|             self.datastore.data['watching'].get(uuid).pause() | ||||
| @@ -76,6 +65,7 @@ class Watch(Resource): | ||||
|         # Return without history, get that via another API call | ||||
|         # Properties are not returned as a JSON, so add the required props manually | ||||
|         watch['history_n'] = watch.history_n | ||||
|         # attr .last_changed will check for the last written text snapshot on change | ||||
|         watch['last_changed'] = watch.last_changed | ||||
|         watch['viewed'] = watch.viewed | ||||
|         return watch | ||||
| @@ -246,7 +236,7 @@ class CreateWatch(Resource): | ||||
| 
 | ||||
|         new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags) | ||||
|         if new_uuid: | ||||
|             self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True})) | ||||
|             self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid})) | ||||
|             return {'uuid': new_uuid}, 201 | ||||
|         else: | ||||
|             return "Invalid or unsupported URL", 400 | ||||
| @@ -284,8 +274,6 @@ class CreateWatch(Resource): | ||||
|         list = {} | ||||
| 
 | ||||
|         tag_limit = request.args.get('tag', '').lower() | ||||
| 
 | ||||
| 
 | ||||
|         for uuid, watch in self.datastore.data['watching'].items(): | ||||
|             # Watch tags by name (replace the other calls?) | ||||
|             tags = self.datastore.get_all_tags_for_watch(uuid=uuid) | ||||
| @@ -303,113 +291,7 @@ class CreateWatch(Resource): | ||||
| 
 | ||||
|         if request.args.get('recheck_all'): | ||||
|             for uuid in self.datastore.data['watching'].keys(): | ||||
|                 self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True})) | ||||
|                 self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|             return {'status': "OK"}, 200 | ||||
| 
 | ||||
|         return list, 200 | ||||
| 
 | ||||
| class Import(Resource): | ||||
|     def __init__(self, **kwargs): | ||||
|         # datastore is a black box dependency | ||||
|         self.datastore = kwargs['datastore'] | ||||
| 
 | ||||
|     @auth.check_token | ||||
|     def post(self): | ||||
|         """ | ||||
|         @api {post} /api/v1/import Import a list of watched URLs | ||||
|         @apiDescription Accepts a line-feed separated list of URLs to import, additionally with ?tag_uuids=(tag  id), ?tag=(name), ?proxy={key}, ?dedupe=true (default true) one URL per line. | ||||
|         @apiExample {curl} Example usage: | ||||
|             curl http://localhost:5000/api/v1/import --data-binary @list-of-sites.txt -H"x-api-key:8a111a21bc2f8f1dd9b9353bbd46049a" | ||||
|         @apiName Import | ||||
|         @apiGroup Watch | ||||
|         @apiSuccess (200) {List} OK List of watch UUIDs added | ||||
|         @apiSuccess (500) {String} ERR Some other error | ||||
|         """ | ||||
| 
 | ||||
|         extras = {} | ||||
| 
 | ||||
|         if request.args.get('proxy'): | ||||
|             plist = self.datastore.proxy_list | ||||
|             if not request.args.get('proxy') in plist: | ||||
|                 return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400 | ||||
|             else: | ||||
|                 extras['proxy'] = request.args.get('proxy') | ||||
| 
 | ||||
|         dedupe = strtobool(request.args.get('dedupe', 'true')) | ||||
| 
 | ||||
|         tags = request.args.get('tag') | ||||
|         tag_uuids = request.args.get('tag_uuids') | ||||
| 
 | ||||
|         if tag_uuids: | ||||
|             tag_uuids = tag_uuids.split(',') | ||||
| 
 | ||||
|         urls = request.get_data().decode('utf8').splitlines() | ||||
|         added = [] | ||||
|         allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False')) | ||||
|         for url in urls: | ||||
|             url = url.strip() | ||||
|             if not len(url): | ||||
|                 continue | ||||
| 
 | ||||
|             # If hosts that only contain alphanumerics are allowed ("localhost" for example) | ||||
|             if not validators.url(url, simple_host=allow_simplehost): | ||||
|                 return f"Invalid or unsupported URL - {url}", 400 | ||||
| 
 | ||||
|             if dedupe and self.datastore.url_exists(url): | ||||
|                 continue | ||||
| 
 | ||||
|             new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags, tag_uuids=tag_uuids) | ||||
|             added.append(new_uuid) | ||||
| 
 | ||||
|         return added | ||||
| 
 | ||||
class SystemInfo(Resource):
    """REST resource reporting queue depth, overdue watches, uptime and version."""

    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']
        # Shared recheck queue, only used here to report its current size
        self.update_q = kwargs['update_q']

    @auth.check_token
    def get(self):
        """
        @api {get} /api/v1/systeminfo Return system info
        @apiDescription Return some info about the current system state
        @apiExample {curl} Example usage:
            curl http://localhost:5000/api/v1/systeminfo -H"x-api-key:813031b16330fe25e3780cf0325daa45"
            HTTP/1.0 200
            {
                'queue_size': 10 ,
                'overdue_watches': ["watch-uuid-list"],
                'uptime': 38344.55,
                'watch_count': 800,
                'version': "0.40.1"
            }
        @apiName Get Info
        @apiGroup System Information
        """
        import time
        overdue_watches = []

        # Check all watches and report which have not been checked but should have been

        for uuid, watch in self.datastore.data.get('watching', {}).items():
            # see if now - last_checked is greater than the time that should have been
            # this is not super accurate (maybe they just edited it) but better than nothing
            t = watch.threshold_seconds()
            if not t:
                # Use the system wide default
                t = self.datastore.threshold_seconds

            time_since_check = time.time() - watch.get('last_checked')

            # Allow 5 minutes of grace time before we decide it's overdue
            if time_since_check - (5 * 60) > t:
                overdue_watches.append(uuid)
        # Imported here (not module level) — presumably to avoid a circular import; confirm
        from changedetectionio import __version__ as main_version
        return {
                   'queue_size': self.update_q.qsize(),
                   'overdue_watches': overdue_watches,
                   'uptime': round(time.time() - self.datastore.start_time, 2),
                   'watch_count': len(self.datastore.data.get('watching', {})),
                   'version': main_version
               }, 200
|         return list, 200 | ||||
| @@ -0,0 +1,26 @@ | ||||
import copy
from . import api_schema
from ..model import watch_base

# Build a JSON Schema at least partially based on our Watch model
watch_base_config = watch_base()
schema = api_schema.build_watch_json_schema(watch_base_config)

# Creating a watch requires a URL; everything else falls back to defaults
schema_create_watch = copy.deepcopy(schema)
schema_create_watch['required'] = ['url']

# Updates may only touch known fields
schema_update_watch = copy.deepcopy(schema)
schema_update_watch['additionalProperties'] = False

# Tag schema is also based on watch_base since Tag inherits from it
schema_tag = copy.deepcopy(schema)
schema_create_tag = copy.deepcopy(schema_tag)
schema_create_tag['required'] = ['title']
schema_update_tag = copy.deepcopy(schema_tag)
schema_update_tag['additionalProperties'] = False

# Import all API resources — kept *after* the schema definitions above, since
# these submodules do `from . import schema_*` at import time
from .Watch import Watch, WatchHistory, WatchSingleHistory, CreateWatch
from .Tags import Tags, Tag
from .Import import Import
from .SystemInfo import SystemInfo
|   | ||||
| @@ -112,6 +112,35 @@ def build_watch_json_schema(d): | ||||
|  | ||||
|     schema['properties']['time_between_check'] = build_time_between_check_json_schema() | ||||
|  | ||||
|     schema['properties']['browser_steps'] = { | ||||
|         "anyOf": [ | ||||
|             { | ||||
|                 "type": "array", | ||||
|                 "items": { | ||||
|                     "type": "object", | ||||
|                     "properties": { | ||||
|                         "operation": { | ||||
|                             "type": ["string", "null"], | ||||
|                             "maxLength": 5000  # Allows null and any string up to 5000 chars (including "") | ||||
|                         }, | ||||
|                         "selector": { | ||||
|                             "type": ["string", "null"], | ||||
|                             "maxLength": 5000 | ||||
|                         }, | ||||
|                         "optional_value": { | ||||
|                             "type": ["string", "null"], | ||||
|                             "maxLength": 5000 | ||||
|                         } | ||||
|                     }, | ||||
|                     "required": ["operation", "selector", "optional_value"], | ||||
|                     "additionalProperties": False  # No extra keys allowed | ||||
|                 } | ||||
|             }, | ||||
|             {"type": "null"},  # Allows null for `browser_steps` | ||||
|             {"type": "array", "maxItems": 0}  # Allows empty array [] | ||||
|         ] | ||||
|     } | ||||
|  | ||||
|     # headers ? | ||||
|     return schema | ||||
|  | ||||
|   | ||||
| @@ -11,22 +11,14 @@ def check_token(f): | ||||
|         datastore = args[0].datastore | ||||
|  | ||||
|         config_api_token_enabled = datastore.data['settings']['application'].get('api_access_token_enabled') | ||||
|         if not config_api_token_enabled: | ||||
|             return | ||||
|  | ||||
|         try: | ||||
|             api_key_header = request.headers['x-api-key'] | ||||
|         except KeyError: | ||||
|             return make_response( | ||||
|                 jsonify("No authorization x-api-key header."), 403 | ||||
|             ) | ||||
|  | ||||
|         config_api_token = datastore.data['settings']['application'].get('api_access_token') | ||||
|  | ||||
|         if api_key_header != config_api_token: | ||||
|             return make_response( | ||||
|                 jsonify("Invalid access - API key invalid."), 403 | ||||
|             ) | ||||
|         # config_api_token_enabled - a UI option in settings if access should obey the key or not | ||||
|         if config_api_token_enabled: | ||||
|             if request.headers.get('x-api-key') != config_api_token: | ||||
|                 return make_response( | ||||
|                     jsonify("Invalid access - API key invalid."), 403 | ||||
|                 ) | ||||
|  | ||||
|         return f(*args, **kwargs) | ||||
|  | ||||
|   | ||||
| @@ -1,11 +0,0 @@ | ||||
import apprise

# Create our AppriseAsset and populate it with some of our new values:
# https://github.com/caronc/apprise/wiki/Development_API#the-apprise-asset-object
# The logo URL is used by notification services that can display an avatar/icon.
asset = apprise.AppriseAsset(
   image_url_logo='https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png'
)

# Branding fields picked up by apprise when composing notifications
asset.app_id = "changedetection.io"
asset.app_desc = "ChangeDetection.io best and simplest website monitoring and change detection"
asset.app_url = "https://changedetection.io"
| @@ -1,78 +0,0 @@ | ||||
# include the decorator
from apprise.decorators import notify

@notify(on="delete")
@notify(on="deletes")
@notify(on="get")
@notify(on="gets")
@notify(on="post")
@notify(on="posts")
@notify(on="put")
@notify(on="puts")
def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs):
    """Relay an apprise notification as a raw HTTP request.

    Handles the custom get://, post://, put:// and delete:// pseudo-schemas
    (and their "...s" TLS variants), converting apprise's URL query
    conventions (`+header=value`, user:password@) into real request
    headers, params and auth.
    """
    import requests
    import json
    from apprise.utils import parse_url as apprise_parse_url
    from apprise import URLBase

    url = kwargs['meta'].get('url')

    # Pick the requests call matching the pseudo-schema; bail out on anything
    # unexpected instead of hitting an UnboundLocalError on `r` below.
    if url.startswith('post'):
        r = requests.post
    elif url.startswith('get'):
        r = requests.get
    elif url.startswith('put'):
        r = requests.put
    elif url.startswith('delete'):
        r = requests.delete
    else:
        return

    # Rewrite the pseudo-schema to plain http/https ("...s" suffix means TLS).
    # (De-duplicated: the old code repeated the put:// pair twice.)
    for method in ('post', 'get', 'put', 'delete'):
        url = url.replace(f'{method}s://', 'https://')
        url = url.replace(f'{method}://', 'http://')

    headers = {}
    params = {}
    auth = None

    # Convert /foobar?+some-header=hello to proper header dictionary
    results = apprise_parse_url(url)
    if not results:
        # Unparseable URL — nothing sensible to send
        return

    # Add our headers that the user can potentially over-ride if they wish
    # to to our returned result set and tidy entries by unquoting them
    headers = {URLBase.unquote(x): URLBase.unquote(y)
               for x, y in results['qsd+'].items()}

    # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
    # In Apprise, it relies on prefixing each request arg with "-", because it uses say &method=update as a flag for apprise
    # but here we are making straight requests, so we need todo convert this against apprise's logic
    for k, v in results['qsd'].items():
        if not k.strip('+-') in results['qsd+'].keys():
            params[URLBase.unquote(k)] = URLBase.unquote(v)

    # Determine Authentication
    auth = ''
    if results.get('user') and results.get('password'):
        # BUGFIX: the password goes in the second slot (was user twice,
        # so basic-auth credentials were never actually sent correctly)
        auth = (URLBase.unquote(results.get('user')), URLBase.unquote(results.get('password')))
    elif results.get('user'):
        auth = (URLBase.unquote(results.get('user')))

    # Try to auto-guess if it's JSON
    try:
        json.loads(body)
        headers['Content-Type'] = 'application/json; charset=utf-8'
    except ValueError as e:
        pass

    r(results.get('url'),
      auth=auth,
      data=body.encode('utf-8') if type(body) is str else body,
      headers=headers,
      params=params
      )
							
								
								
									
										16
									
								
								changedetectionio/apprise_plugin/assets.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,16 @@ | ||||
from apprise import AppriseAsset

# Refer to:
# https://github.com/caronc/apprise/wiki/Development_API#the-apprise-asset-object

# Branding values applied to every outgoing apprise notification
APPRISE_APP_ID = "changedetection.io"
APPRISE_APP_DESC = "ChangeDetection.io best and simplest website monitoring and change detection"
APPRISE_APP_URL = "https://changedetection.io"
# Avatar/logo shown by notification services that support an image
APPRISE_AVATAR_URL = "https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png"

# Shared asset instance, passed to Apprise() by callers elsewhere in the project
apprise_asset = AppriseAsset(
    app_id=APPRISE_APP_ID,
    app_desc=APPRISE_APP_DESC,
    app_url=APPRISE_APP_URL,
    image_url_logo=APPRISE_AVATAR_URL,
)
							
								
								
									
										112
									
								
								changedetectionio/apprise_plugin/custom_handlers.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,112 @@ | ||||
| import json | ||||
| import re | ||||
| from urllib.parse import unquote_plus | ||||
|  | ||||
| import requests | ||||
| from apprise.decorators import notify | ||||
| from apprise.utils.parse import parse_url as apprise_parse_url | ||||
| from loguru import logger | ||||
| from requests.structures import CaseInsensitiveDict | ||||
|  | ||||
SUPPORTED_HTTP_METHODS = {"get", "post", "put", "delete", "patch", "head"}


def notify_supported_methods(func):
    """Register *func* as the apprise handler for every supported HTTP verb,
    both the plain schema (e.g. ``post://``) and its TLS variant (``posts://``).
    """
    for verb in SUPPORTED_HTTP_METHODS:
        # plain http flavour, then the https ("...s") flavour of the same verb
        for schema in (verb, f"{verb}s"):
            func = notify(on=schema)(func)
    return func
|  | ||||
|  | ||||
| def _get_auth(parsed_url: dict) -> str | tuple[str, str]: | ||||
|     user: str | None = parsed_url.get("user") | ||||
|     password: str | None = parsed_url.get("password") | ||||
|  | ||||
|     if user is not None and password is not None: | ||||
|         return (unquote_plus(user), unquote_plus(password)) | ||||
|  | ||||
|     if user is not None: | ||||
|         return unquote_plus(user) | ||||
|  | ||||
|     return "" | ||||
|  | ||||
|  | ||||
def _get_headers(parsed_url: dict, body: str) -> CaseInsensitiveDict:
    """Turn apprise '+key=value' query entries into HTTP headers.

    Header names are unquoted and Title-Cased; if no Content-Type was given
    and *body* parses as JSON, a JSON Content-Type is added automatically.
    """
    decoded = {
        unquote_plus(raw_key).title(): unquote_plus(raw_value)
        for raw_key, raw_value in parsed_url["qsd+"].items()
    }
    headers = CaseInsensitiveDict(decoded)

    # If Content-Type is not specified, guess if the body is a valid JSON
    if headers.get("Content-Type") is None:
        try:
            json.loads(body)
        except Exception:
            pass
        else:
            headers["Content-Type"] = "application/json; charset=utf-8"

    return headers
|  | ||||
|  | ||||
def _get_params(parsed_url: dict) -> CaseInsensitiveDict:
    """Collect plain query-string parameters to forward with the request.

    https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
    In Apprise, it relies on prefixing each request arg with "-", because it uses say &method=update as a flag for apprise
    but here we are making straight requests, so we need todo convert this against apprise's logic
    """
    params = CaseInsensitiveDict()
    for raw_key, raw_value in parsed_url["qsd"].items():
        # Skip anything apprise already claimed as a '-' flag or '+' header
        if raw_key.strip("-") in parsed_url["qsd-"]:
            continue
        if raw_key.strip("+") in parsed_url["qsd+"]:
            continue
        params[unquote_plus(raw_key)] = unquote_plus(raw_value)

    return params
|  | ||||
|  | ||||
@notify_supported_methods
def apprise_http_custom_handler(
    body: str,
    title: str,
    notify_type: str,
    meta: dict,
    *args,
    **kwargs,
) -> bool:
    """Apprise handler that relays a notification as a raw HTTP request.

    Registered (via @notify_supported_methods) for each supported verb schema
    (``get://``, ``post://``, ...) and its TLS ``...s`` variant.  Returns True
    on a 2xx response, False on parse failure or any request error.
    """
    url: str = meta.get("url")
    schema: str = meta.get("schema")
    # "posts" -> "POST": the trailing "s" only marks TLS, not the verb
    method: str = re.sub(r"s$", "", schema).upper()

    # Convert /foobar?+some-header=hello to proper header dictionary
    parsed_url: dict[str, str | dict | None] | None = apprise_parse_url(url)
    if parsed_url is None:
        return False

    auth = _get_auth(parsed_url=parsed_url)
    headers = _get_headers(parsed_url=parsed_url, body=body)
    params = _get_params(parsed_url=parsed_url)

    # Rewrite the leading pseudo-schema back to real http/https for the request
    url = re.sub(rf"^{schema}", "https" if schema.endswith("s") else "http", parsed_url.get("url"))

    try:
        response = requests.request(
            method=method,
            url=url,
            auth=auth,
            headers=headers,
            params=params,
            data=body.encode("utf-8") if isinstance(body, str) else body,
        )

        # Treat non-2xx as delivery failure
        response.raise_for_status()

        logger.info(f"Successfully sent custom notification to {url}")
        return True

    except requests.RequestException as e:
        logger.error(f"Remote host error while sending custom notification to {url}: {e}")
        return False

    except Exception as e:
        logger.error(f"Unexpected error occurred while sending custom notification to {url}: {e}")
        return False
							
								
								
									
										33
									
								
								changedetectionio/auth_decorator.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,33 @@ | ||||
| import os | ||||
| from functools import wraps | ||||
| from flask import current_app, redirect, request | ||||
| from loguru import logger | ||||
|  | ||||
def login_optionally_required(func):
    """
    If password authentication is enabled, verify the user is logged in.
    To be used as a decorator for routes that should optionally require login.
    Blueprint-friendly: reads the datastore via current_app instead of the app object.
    """
    @wraps(func)
    def decorated_view(*args, **kwargs):
        import flask_login
        from flask_login import current_user

        # Access datastore through the app config
        datastore = current_app.config['DATASTORE']
        app_settings = datastore.data['settings']['application']
        password_protected = app_settings.get('password') or os.getenv("SALTED_PASS", False)

        # Shared diff pages may be viewable without login when enabled in settings
        endpoint_name = request.endpoint or ''
        if 'diff_history_page' in endpoint_name and app_settings.get('shared_diff_access'):
            return func(*args, **kwargs)
        # Methods flask-login exempts (e.g. OPTIONS) always pass through
        if request.method in flask_login.config.EXEMPT_METHODS:
            return func(*args, **kwargs)
        if current_app.config.get('LOGIN_DISABLED'):
            return func(*args, **kwargs)
        # Password protection is on and nobody is logged in -> bounce to login
        if password_protected and not current_user.is_authenticated:
            return current_app.login_manager.unauthorized()

        return func(*args, **kwargs)

    return decorated_view
							
								
								
									
										164
									
								
								changedetectionio/blueprint/backups/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,164 @@ | ||||
| import datetime | ||||
| import glob | ||||
| import threading | ||||
|  | ||||
| from flask import Blueprint, render_template, send_from_directory, flash, url_for, redirect, abort | ||||
| import os | ||||
|  | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio.flask_app import login_optionally_required | ||||
| from loguru import logger | ||||
|  | ||||
BACKUP_FILENAME_FORMAT = "changedetection-backup-{}.zip"


def create_backup(datastore_path, watches: dict):
    """Create a zip backup of the datastore under datastore_path.

    The archive contains the main index (url-watches.json), the flask secret,
    every file in each watch's data directory, and two plain-text URL lists.
    The zip is written under a temporary name and only renamed to its final
    BACKUP_FILENAME_FORMAT name when complete, so a half-written archive is
    never picked up as a finished backup.

    :param datastore_path: directory holding url-watches.json, secret.txt etc.
    :param watches: mapping of watch UUID -> watch object (each needs a
        watch_data_dir attribute and dict-style url/tags access).
    """
    logger.debug("Creating backup...")
    import zipfile
    from pathlib import Path

    # Timestamped final name, e.g. changedetection-backup-20240101010101.zip
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    backupname = BACKUP_FILENAME_FORMAT.format(timestamp)
    backup_filepath = os.path.join(datastore_path, backupname)
    # Build the temp name by appending, NOT with str.replace('.zip', '.tmp')
    # on the whole path - replace() rewrites every occurrence, so it would
    # corrupt a datastore_path that itself contains ".zip".
    tmp_filepath = backup_filepath + ".tmp"

    with zipfile.ZipFile(tmp_filepath, "w",
                         compression=zipfile.ZIP_DEFLATED,
                         compresslevel=8) as zipObj:

        # Add the index
        zipObj.write(os.path.join(datastore_path, "url-watches.json"), arcname="url-watches.json")

        # Add the flask app secret
        zipObj.write(os.path.join(datastore_path, "secret.txt"), arcname="secret.txt")

        # Add any data in the watch data directory.
        for uuid, w in watches.items():
            for f in Path(w.watch_data_dir).glob('*'):
                zipObj.write(f,
                             # Use the full path to access the file, but make the file 'relative' in the Zip.
                             arcname=os.path.join(f.parts[-2], f.parts[-1]),
                             compress_type=zipfile.ZIP_DEFLATED,
                             compresslevel=8)

        # Create a list file with just the URLs, so it's easier to port somewhere else in the future
        list_file = "url-list.txt"
        with open(os.path.join(datastore_path, list_file), "w") as f:
            for uuid in watches:
                url = watches[uuid]["url"]
                f.write("{}\r\n".format(url))

        list_with_tags_file = "url-list-with-tags.txt"
        with open(os.path.join(datastore_path, list_with_tags_file), "w") as f:
            for uuid in watches:
                url = watches[uuid].get('url')
                tag = watches[uuid].get('tags', {})
                f.write("{} {}\r\n".format(url, tag))

        # Add both list files to the Zip
        zipObj.write(
            os.path.join(datastore_path, list_file),
            arcname=list_file,
            compress_type=zipfile.ZIP_DEFLATED,
            compresslevel=8,
        )
        zipObj.write(
            os.path.join(datastore_path, list_with_tags_file),
            arcname=list_with_tags_file,
            compress_type=zipfile.ZIP_DEFLATED,
            compresslevel=8,
        )

    # Now it's done, rename it so it shows up finally and its completed being written.
    os.rename(tmp_filepath, backup_filepath)
|  | ||||
|  | ||||
def construct_blueprint(datastore: ChangeDetectionStore):
    """Construct the 'backups' blueprint.

    Routes: index page listing available backups, background backup creation,
    download of a single backup, and bulk removal.  Every route honours the
    optional-login protection.
    """
    backups_blueprint = Blueprint('backups', __name__, template_folder="templates")
    # Background zip-builder threads; checked so only one backup runs at a time.
    backup_threads = []

    def find_backups():
        """Return [{'filename', 'filesize' (MB string), 'creation_time'}, ...] newest first."""
        backup_filepath = os.path.join(datastore.datastore_path, BACKUP_FILENAME_FORMAT.format("*"))
        backup_info = []

        for backup in glob.glob(backup_filepath):
            size = os.path.getsize(backup) / (1024 * 1024)
            creation_time = os.path.getctime(backup)
            backup_info.append({
                'filename': os.path.basename(backup),
                'filesize': f"{size:.2f}",
                'creation_time': creation_time
            })

        backup_info.sort(key=lambda x: x['creation_time'], reverse=True)
        return backup_info

    # NOTE: the route decorator must be listed first (outermost) so the login
    # check wraps the function that Flask actually registers.  With
    # @login_optionally_required on top, .route() registered the unprotected
    # inner function and the auth check was silently bypassed.
    @backups_blueprint.route("/request-backup", methods=['GET'])
    @login_optionally_required
    def request_backup():
        if any(thread.is_alive() for thread in backup_threads):
            flash("A backup is already running, check back in a few minutes", "error")
            return redirect(url_for('backups.index'))

        if len(find_backups()) > int(os.getenv("MAX_NUMBER_BACKUPS", 100)):
            flash("Maximum number of backups reached, please remove some", "error")
            return redirect(url_for('backups.index'))

        # Be sure we're written fresh before archiving
        datastore.sync_to_json()
        zip_thread = threading.Thread(target=create_backup,
                                      args=(datastore.datastore_path, datastore.data.get("watching")))
        zip_thread.start()
        backup_threads.append(zip_thread)
        flash("Backup building in background, check back in a few minutes.")

        return redirect(url_for('backups.index'))

    @backups_blueprint.route("/download/<string:filename>", methods=['GET'])
    @login_optionally_required
    def download_backup(filename):
        import re
        filename = filename.strip()

        # 'latest' is an alias for the newest backup
        if filename == 'latest':
            backups = find_backups()
            if not backups:
                abort(404)  # nothing has been created yet
            filename = backups[0]['filename']

        # Only accept names matching the generated backup pattern (raw string
        # so \d is a regex digit class, not a string escape); this also rules
        # out any path-traversal attempt in the filename.
        backup_filename_regex = BACKUP_FILENAME_FORMAT.format(r"\d+")
        if not re.match(r"^" + backup_filename_regex + r"$", filename):
            abort(400)  # Bad Request if the filename doesn't match the pattern

        datastore_dir = os.path.abspath(datastore.datastore_path)
        full_path = os.path.join(datastore_dir, filename)
        # Belt-and-braces: the resolved path must stay inside the datastore dir.
        if not full_path.startswith(datastore_dir):
            abort(404)

        logger.debug(f"Backup download request for '{full_path}'")
        return send_from_directory(datastore_dir, filename, as_attachment=True)

    @backups_blueprint.route("", methods=['GET'])
    @login_optionally_required
    def index():
        backups = find_backups()
        return render_template("overview.html",
                               available_backups=backups,
                               backup_running=any(thread.is_alive() for thread in backup_threads)
                               )

    @backups_blueprint.route("/remove-backups", methods=['GET'])
    @login_optionally_required
    def remove_backups():
        backup_filepath = os.path.join(datastore.datastore_path, BACKUP_FILENAME_FORMAT.format("*"))
        for backup in glob.glob(backup_filepath):
            os.unlink(backup)

        flash("Backups were deleted.")

        return redirect(url_for('backups.index'))

    return backups_blueprint
							
								
								
									
										36
									
								
								changedetectionio/blueprint/backups/templates/overview.html
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,36 @@ | ||||
{% extends 'base.html' %}
{% block content %}
    {# Backups admin page.  Rendered by the backups blueprint 'index' view with:
       - available_backups: list of dicts ('filename', 'filesize' in MB, 'creation_time'),
         newest first
       - backup_running: True while a background backup thread is still alive #}
    {% from '_helpers.html' import render_simple_field, render_field %}
    <div class="edit-form">
        <div class="box-wrap inner">
            <h4>Backups</h4>
            {% if backup_running %}
                <p>
                    <strong>A backup is running!</strong>
                </p>
            {% endif %}
            <p>
                Here you can download and request a new backup, when a backup is completed you will see it listed below.
            </p>
            <br>
                {% if available_backups %}
                    <ul>
                    {% for backup in available_backups %}
                        <li><a href="{{ url_for('backups.download_backup', filename=backup["filename"]) }}">{{ backup["filename"] }}</a> {{  backup["filesize"] }} Mb</li>
                    {% endfor %}
                    </ul>
                {% else %}
                    <p>
                    <strong>No backups found.</strong>
                    </p>
                {% endif %}

            <a class="pure-button pure-button-primary" href="{{ url_for('backups.request_backup') }}">Create backup</a>
            {% if available_backups %}
                <a class="pure-button button-small button-error " href="{{ url_for('backups.remove_backups') }}">Remove backups</a>
            {% endif %}
        </div>
    </div>


{% endblock %}
| @@ -22,7 +22,9 @@ from loguru import logger | ||||
|  | ||||
| browsersteps_sessions = {} | ||||
| io_interface_context = None | ||||
|  | ||||
| import json | ||||
| import hashlib | ||||
| from flask import Response | ||||
|  | ||||
| def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|     browser_steps_blueprint = Blueprint('browser_steps', __name__, template_folder="templates") | ||||
| @@ -31,10 +33,8 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|         from . import nonContext | ||||
|         from . import browser_steps | ||||
|         import time | ||||
|         global browsersteps_sessions | ||||
|         global io_interface_context | ||||
|  | ||||
|  | ||||
|         # We keep the playwright session open for many minutes | ||||
|         keepalive_seconds = int(os.getenv('BROWSERSTEPS_MINUTES_KEEPALIVE', 10)) * 60 | ||||
|  | ||||
| @@ -85,7 +85,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|         browsersteps_start_session['browserstepper'] = browser_steps.browsersteps_live_ui( | ||||
|             playwright_browser=browsersteps_start_session['browser'], | ||||
|             proxy=proxy, | ||||
|             start_url=datastore.data['watching'][watch_uuid].get('url'), | ||||
|             start_url=datastore.data['watching'][watch_uuid].link, | ||||
|             headers=datastore.data['watching'][watch_uuid].get('headers') | ||||
|         ) | ||||
|  | ||||
| @@ -101,8 +101,6 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|         # A new session was requested, return sessionID | ||||
|  | ||||
|         import uuid | ||||
|         global browsersteps_sessions | ||||
|  | ||||
|         browsersteps_session_id = str(uuid.uuid4()) | ||||
|         watch_uuid = request.args.get('uuid') | ||||
|  | ||||
| @@ -146,7 +144,6 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|     def browsersteps_ui_update(): | ||||
|         import base64 | ||||
|         import playwright._impl._errors | ||||
|         global browsersteps_sessions | ||||
|         from changedetectionio.blueprint.browser_steps import browser_steps | ||||
|  | ||||
|         remaining =0 | ||||
| @@ -160,14 +157,13 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|         if not browsersteps_sessions.get(browsersteps_session_id): | ||||
|             return make_response('No session exists under that ID', 500) | ||||
|  | ||||
|  | ||||
|         is_last_step = False | ||||
|         # Actions - step/apply/etc, do the thing and return state | ||||
|         if request.method == 'POST': | ||||
|             # @todo - should always be an existing session | ||||
|             step_operation = request.form.get('operation') | ||||
|             step_selector = request.form.get('selector') | ||||
|             step_optional_value = request.form.get('optional_value') | ||||
|             step_n = int(request.form.get('step_n')) | ||||
|             is_last_step = strtobool(request.form.get('is_last_step')) | ||||
|  | ||||
|             # @todo try.. accept.. nice errors not popups.. | ||||
| @@ -182,16 +178,6 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|                 # Try to find something of value to give back to the user | ||||
|                 return make_response(str(e).splitlines()[0], 401) | ||||
|  | ||||
|             # Get visual selector ready/update its data (also use the current filter info from the page?) | ||||
|             # When the last 'apply' button was pressed | ||||
|             # @todo this adds overhead because the xpath selection is happening twice | ||||
|             u = browsersteps_sessions[browsersteps_session_id]['browserstepper'].page.url | ||||
|             if is_last_step and u: | ||||
|                 (screenshot, xpath_data) = browsersteps_sessions[browsersteps_session_id]['browserstepper'].request_visualselector_data() | ||||
|                 watch = datastore.data['watching'].get(uuid) | ||||
|                 if watch: | ||||
|                     watch.save_screenshot(screenshot=screenshot) | ||||
|                     watch.save_xpath_data(data=xpath_data) | ||||
|  | ||||
| #        if not this_session.page: | ||||
| #            cleanup_playwright_session() | ||||
| @@ -199,31 +185,35 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|  | ||||
|         # Screenshots and other info only needed on requesting a step (POST) | ||||
|         try: | ||||
|             state = browsersteps_sessions[browsersteps_session_id]['browserstepper'].get_current_state() | ||||
|             (screenshot, xpath_data) = browsersteps_sessions[browsersteps_session_id]['browserstepper'].get_current_state() | ||||
|             if is_last_step: | ||||
|                 watch = datastore.data['watching'].get(uuid) | ||||
|                 u = browsersteps_sessions[browsersteps_session_id]['browserstepper'].page.url | ||||
|                 if watch and u: | ||||
|                     watch.save_screenshot(screenshot=screenshot) | ||||
|                     watch.save_xpath_data(data=xpath_data) | ||||
|  | ||||
|         except playwright._impl._api_types.Error as e: | ||||
|             return make_response("Browser session ran out of time :( Please reload this page."+str(e), 401) | ||||
|         except Exception as e: | ||||
|             return make_response("Error fetching screenshot and element data - " + str(e), 401) | ||||
|  | ||||
|         # Use send_file() which is way faster than read/write loop on bytes | ||||
|         import json | ||||
|         from tempfile import mkstemp | ||||
|         from flask import send_file | ||||
|         tmp_fd, tmp_file = mkstemp(text=True, suffix=".json", prefix="changedetectionio-") | ||||
|         # SEND THIS BACK TO THE BROWSER | ||||
|  | ||||
|         output = json.dumps({'screenshot': "data:image/jpeg;base64,{}".format( | ||||
|             base64.b64encode(state[0]).decode('ascii')), | ||||
|             'xpath_data': state[1], | ||||
|             'session_age_start': browsersteps_sessions[browsersteps_session_id]['browserstepper'].age_start, | ||||
|             'browser_time_remaining': round(remaining) | ||||
|         }) | ||||
|         output = { | ||||
|             "screenshot": f"data:image/jpeg;base64,{base64.b64encode(screenshot).decode('ascii')}", | ||||
|             "xpath_data": xpath_data, | ||||
|             "session_age_start": browsersteps_sessions[browsersteps_session_id]['browserstepper'].age_start, | ||||
|             "browser_time_remaining": round(remaining) | ||||
|         } | ||||
|         json_data = json.dumps(output) | ||||
|  | ||||
|         with os.fdopen(tmp_fd, 'w') as f: | ||||
|             f.write(output) | ||||
|         # Generate an ETag (hash of the response body) | ||||
|         etag_hash = hashlib.md5(json_data.encode('utf-8')).hexdigest() | ||||
|  | ||||
|         response = make_response(send_file(path_or_file=tmp_file, | ||||
|                                            mimetype='application/json; charset=UTF-8', | ||||
|                                            etag=True)) | ||||
|         # No longer needed | ||||
|         os.unlink(tmp_file) | ||||
|         # Create the response with ETag | ||||
|         response = Response(json_data, mimetype="application/json; charset=UTF-8") | ||||
|         response.set_etag(etag_hash) | ||||
|  | ||||
|         return response | ||||
|  | ||||
|   | ||||
| @@ -1,14 +1,15 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import os | ||||
| import time | ||||
| import re | ||||
| from random import randint | ||||
| from loguru import logger | ||||
|  | ||||
| from changedetectionio.content_fetchers.helpers import capture_stitched_together_full_page, SCREENSHOT_SIZE_STITCH_THRESHOLD | ||||
| from changedetectionio.content_fetchers.base import manage_user_agent | ||||
| from changedetectionio.safe_jinja import render as jinja_render | ||||
|  | ||||
|  | ||||
|  | ||||
| # Two flags, tell the JS which of the "Selector" or "Value" field should be enabled in the front end | ||||
| # 0- off, 1- on | ||||
| browser_step_ui_config = {'Choose one': '0 0', | ||||
| @@ -31,6 +32,7 @@ browser_step_ui_config = {'Choose one': '0 0', | ||||
| #                          'Extract text and use as filter': '1 0', | ||||
|                           'Goto site': '0 0', | ||||
|                           'Goto URL': '0 1', | ||||
|                           'Make all child elements visible': '1 0', | ||||
|                           'Press Enter': '0 0', | ||||
|                           'Select by label': '1 1', | ||||
|                           'Scroll down': '0 0', | ||||
| @@ -38,6 +40,7 @@ browser_step_ui_config = {'Choose one': '0 0', | ||||
|                           'Wait for seconds': '0 1', | ||||
|                           'Wait for text': '0 1', | ||||
|                           'Wait for text in element': '1 1', | ||||
|                           'Remove elements': '1 0', | ||||
|                           #                          'Press Page Down': '0 0', | ||||
|                           #                          'Press Page Up': '0 0', | ||||
|                           # weird bug, come back to it later | ||||
| @@ -52,6 +55,8 @@ class steppable_browser_interface(): | ||||
|     page = None | ||||
|     start_url = None | ||||
|  | ||||
|     action_timeout = 10 * 1000 | ||||
|  | ||||
|     def __init__(self, start_url): | ||||
|         self.start_url = start_url | ||||
|  | ||||
| @@ -102,7 +107,7 @@ class steppable_browser_interface(): | ||||
|             return | ||||
|         elem = self.page.get_by_text(value) | ||||
|         if elem.count(): | ||||
|             elem.first.click(delay=randint(200, 500), timeout=3000) | ||||
|             elem.first.click(delay=randint(200, 500), timeout=self.action_timeout) | ||||
|  | ||||
|     def action_click_element_containing_text_if_exists(self, selector=None, value=''): | ||||
|         logger.debug("Clicking element containing text if exists") | ||||
| @@ -111,7 +116,7 @@ class steppable_browser_interface(): | ||||
|         elem = self.page.get_by_text(value) | ||||
|         logger.debug(f"Clicking element containing text - {elem.count()} elements found") | ||||
|         if elem.count(): | ||||
|             elem.first.click(delay=randint(200, 500), timeout=3000) | ||||
|             elem.first.click(delay=randint(200, 500), timeout=self.action_timeout) | ||||
|         else: | ||||
|             return | ||||
|  | ||||
| @@ -119,7 +124,7 @@ class steppable_browser_interface(): | ||||
|         if not len(selector.strip()): | ||||
|             return | ||||
|  | ||||
|         self.page.fill(selector, value, timeout=10 * 1000) | ||||
|         self.page.fill(selector, value, timeout=self.action_timeout) | ||||
|  | ||||
|     def action_execute_js(self, selector, value): | ||||
|         response = self.page.evaluate(value) | ||||
| @@ -130,7 +135,7 @@ class steppable_browser_interface(): | ||||
|         if not len(selector.strip()): | ||||
|             return | ||||
|  | ||||
|         self.page.click(selector=selector, timeout=30 * 1000, delay=randint(200, 500)) | ||||
|         self.page.click(selector=selector, timeout=self.action_timeout + 20 * 1000, delay=randint(200, 500)) | ||||
|  | ||||
|     def action_click_element_if_exists(self, selector, value): | ||||
|         import playwright._impl._errors as _api_types | ||||
| @@ -138,7 +143,7 @@ class steppable_browser_interface(): | ||||
|         if not len(selector.strip()): | ||||
|             return | ||||
|         try: | ||||
|             self.page.click(selector, timeout=10 * 1000, delay=randint(200, 500)) | ||||
|             self.page.click(selector, timeout=self.action_timeout, delay=randint(200, 500)) | ||||
|         except _api_types.TimeoutError as e: | ||||
|             return | ||||
|         except _api_types.Error as e: | ||||
| @@ -185,11 +190,29 @@ class steppable_browser_interface(): | ||||
|         self.page.keyboard.press("PageDown", delay=randint(200, 500)) | ||||
|  | ||||
|     def action_check_checkbox(self, selector, value): | ||||
|         self.page.locator(selector).check(timeout=1000) | ||||
|         self.page.locator(selector).check(timeout=self.action_timeout) | ||||
|  | ||||
|     def action_uncheck_checkbox(self, selector, value): | ||||
|         self.page.locator(selector, timeout=1000).uncheck(timeout=1000) | ||||
|         self.page.locator(selector).uncheck(timeout=self.action_timeout) | ||||
|  | ||||
    def action_remove_elements(self, selector, value):
        """Removes all elements matching the given selector from the DOM."""
        # 'value' is unused; the (selector, value) signature matches the other
        # action_* browser-step handlers.
        self.page.locator(selector).evaluate_all("els => els.forEach(el => el.remove())")
|  | ||||
    def action_make_all_child_elements_visible(self, selector, value):
        """Recursively makes all child elements inside the given selector fully visible."""
        # 'value' is unused; signature matches the other action_* handlers.
        # NOTE(review): locator("*") targets descendants of the selected
        # element - the selected element itself is not restyled.
        self.page.locator(selector).locator("*").evaluate_all("""
            els => els.forEach(el => {
                el.style.display = 'block';   // Forces it to be displayed
                el.style.visibility = 'visible';   // Ensures it's not hidden
                el.style.opacity = '1';   // Fully opaque
                el.style.position = 'relative';   // Avoids 'absolute' hiding
                el.style.height = 'auto';   // Expands collapsed elements
                el.style.width = 'auto';   // Ensures full visibility
                el.removeAttribute('hidden');   // Removes hidden attribute
                el.classList.remove('hidden', 'd-none');  // Removes common CSS hidden classes
            })
        """)
|  | ||||
| # Responsible for maintaining a live 'context' with the chrome CDP | ||||
| # @todo - how long do contexts live for anyway? | ||||
| @@ -257,6 +280,7 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|         logger.debug(f"Time to browser setup {time.time()-now:.2f}s") | ||||
|         self.page.wait_for_timeout(1 * 1000) | ||||
|  | ||||
|  | ||||
|     def mark_as_closed(self): | ||||
|         logger.debug("Page closed, cleaning up..") | ||||
|  | ||||
| @@ -274,39 +298,30 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|         now = time.time() | ||||
|         self.page.wait_for_timeout(1 * 1000) | ||||
|  | ||||
|         # The actual screenshot | ||||
|         screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=40) | ||||
|  | ||||
|         full_height = self.page.evaluate("document.documentElement.scrollHeight") | ||||
|  | ||||
|         if full_height >= SCREENSHOT_SIZE_STITCH_THRESHOLD: | ||||
|             logger.warning(f"Page full Height: {full_height}px longer than {SCREENSHOT_SIZE_STITCH_THRESHOLD}px, using 'stitched screenshot method'.") | ||||
|             screenshot = capture_stitched_together_full_page(self.page) | ||||
|         else: | ||||
|             screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=40) | ||||
|  | ||||
|         logger.debug(f"Time to get screenshot from browser {time.time() - now:.2f}s") | ||||
|  | ||||
|         now = time.time() | ||||
|         self.page.evaluate("var include_filters=''") | ||||
|         # Go find the interactive elements | ||||
|         # @todo in the future, something smarter that can scan for elements with .click/focus etc event handlers? | ||||
|         elements = 'a,button,input,select,textarea,i,th,td,p,li,h1,h2,h3,h4,div,span' | ||||
|         xpath_element_js = xpath_element_js.replace('%ELEMENTS%', elements) | ||||
|  | ||||
|         xpath_data = self.page.evaluate("async () => {" + xpath_element_js + "}") | ||||
|         # So the JS will find the smallest one first | ||||
|         xpath_data['size_pos'] = sorted(xpath_data['size_pos'], key=lambda k: k['width'] * k['height'], reverse=True) | ||||
|         logger.debug(f"Time to complete get_current_state of browser {time.time()-now:.2f}s") | ||||
|         # except | ||||
|         logger.debug(f"Time to scrape xpath element data in browser {time.time()-now:.2f}s") | ||||
|  | ||||
|         # playwright._impl._api_types.Error: Browser closed. | ||||
|         # @todo show some countdown timer? | ||||
|         return (screenshot, xpath_data) | ||||
|  | ||||
|     def request_visualselector_data(self): | ||||
|         """ | ||||
|         Does the same that the playwright operation in content_fetcher does | ||||
|         This is used to just bump the VisualSelector data so it' ready to go if they click on the tab | ||||
|         @todo refactor and remove duplicate code, add include_filters | ||||
|         :param xpath_data: | ||||
|         :param screenshot: | ||||
|         :param current_include_filters: | ||||
|         :return: | ||||
|         """ | ||||
|         import importlib.resources | ||||
|         self.page.evaluate("var include_filters=''") | ||||
|         xpath_element_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('xpath_element_scraper.js').read_text() | ||||
|         from changedetectionio.content_fetchers import visualselector_xpath_selectors | ||||
|         xpath_element_js = xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) | ||||
|         xpath_data = self.page.evaluate("async () => {" + xpath_element_js + "}") | ||||
|         screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=int(os.getenv("SCREENSHOT_QUALITY", 72))) | ||||
|  | ||||
|         return (screenshot, xpath_data) | ||||
|   | ||||
| @@ -1,4 +1,7 @@ | ||||
| import importlib | ||||
| from concurrent.futures import ThreadPoolExecutor | ||||
|  | ||||
| from changedetectionio.processors.text_json_diff.processor import FilterNotFoundInResponse | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
|  | ||||
| from functools import wraps | ||||
| @@ -30,7 +33,6 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|     def long_task(uuid, preferred_proxy): | ||||
|         import time | ||||
|         from changedetectionio.content_fetchers import exceptions as content_fetcher_exceptions | ||||
|         from changedetectionio.processors.text_json_diff import text_json_diff | ||||
|         from changedetectionio.safe_jinja import render as jinja_render | ||||
|  | ||||
|         status = {'status': '', 'length': 0, 'text': ''} | ||||
| @@ -38,8 +40,12 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|         contents = '' | ||||
|         now = time.time() | ||||
|         try: | ||||
|             update_handler = text_json_diff.perform_site_check(datastore=datastore, watch_uuid=uuid) | ||||
|             update_handler.call_browser() | ||||
|             processor_module = importlib.import_module("changedetectionio.processors.text_json_diff.processor") | ||||
|             update_handler = processor_module.perform_site_check(datastore=datastore, | ||||
|                                                                  watch_uuid=uuid | ||||
|                                                                  ) | ||||
|  | ||||
|             update_handler.call_browser(preferred_proxy_id=preferred_proxy) | ||||
|         # title, size is len contents not len xfer | ||||
|         except content_fetcher_exceptions.Non200ErrorCodeReceived as e: | ||||
|             if e.status_code == 404: | ||||
| @@ -48,7 +54,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|                 status.update({'status': 'ERROR', 'length': len(contents), 'text': f"{e.status_code} - Access denied"}) | ||||
|             else: | ||||
|                 status.update({'status': 'ERROR', 'length': len(contents), 'text': f"Status code: {e.status_code}"}) | ||||
|         except text_json_diff.FilterNotFoundInResponse: | ||||
|         except FilterNotFoundInResponse: | ||||
|             status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but CSS/xPath filter not found (page changed layout?)"}) | ||||
|         except content_fetcher_exceptions.EmptyReply as e: | ||||
|             if e.status_code == 403 or e.status_code == 401: | ||||
|   | ||||
							
								
								
									
										74
									
								
								changedetectionio/blueprint/imports/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,74 @@ | ||||
| from flask import Blueprint, request, redirect, url_for, flash, render_template | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio.auth_decorator import login_optionally_required | ||||
| from changedetectionio.blueprint.imports.importer import ( | ||||
|     import_url_list,  | ||||
|     import_distill_io_json,  | ||||
|     import_xlsx_wachete,  | ||||
|     import_xlsx_custom | ||||
| ) | ||||
|  | ||||
def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMetaData):
    """Build the /import blueprint.

    Handles three import sources on POST: a newline-separated URL list, a
    Distill.io JSON export, and an .xlsx upload (Wachete export or a custom
    column mapping).  Newly created watches are pushed onto ``update_q`` for
    an immediate recheck.
    """
    import_blueprint = Blueprint('imports', __name__, template_folder="templates")

    @import_blueprint.route("/import", methods=['GET', 'POST'])
    @login_optionally_required
    def import_page():
        remaining_urls = []
        from changedetectionio import forms

        if request.method == 'POST':
            # URL List import
            urls = request.values.get('urls')
            if urls and urls.strip():
                # Import and push into the queue for immediate update check
                importer_handler = import_url_list()
                importer_handler.run(data=urls, flash=flash, datastore=datastore, processor=request.values.get('processor', 'text_json_diff'))
                for uuid in importer_handler.new_uuids:
                    update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))

                if len(importer_handler.remaining_data) == 0:
                    return redirect(url_for('watchlist.index'))
                else:
                    # URLs that failed validation stay in the textarea for the user to fix
                    remaining_urls = importer_handler.remaining_data

            # Distill.io import
            distill_doc = request.values.get('distill-io')
            if distill_doc and distill_doc.strip():
                # Import and push into the queue for immediate update check
                d_importer = import_distill_io_json()
                d_importer.run(data=distill_doc, flash=flash, datastore=datastore)
                for uuid in d_importer.new_uuids:
                    update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))

            # XLSX importer
            if request.files and request.files.get('xlsx_file'):
                file = request.files['xlsx_file']

                if request.values.get('file_mapping') == 'wachete':
                    w_importer = import_xlsx_wachete()
                    w_importer.run(data=file, flash=flash, datastore=datastore)
                else:
                    w_importer = import_xlsx_custom()
                    # Build a mapping of spreadsheet column number -> field type from
                    # the posted custom_xlsx[col_N] / custom_xlsx[col_type_N] pairs.
                    # (renamed from `map` so the builtin isn't shadowed)
                    col_mapping = {}
                    for i in range(10):
                        c = request.values.get(f"custom_xlsx[col_{i}]")
                        v = request.values.get(f"custom_xlsx[col_type_{i}]")
                        if c and v:
                            col_mapping[int(c)] = v

                    w_importer.import_profile = col_mapping
                    w_importer.run(data=file, flash=flash, datastore=datastore)

                for uuid in w_importer.new_uuids:
                    update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))

        # Could be some remaining, or we could be on GET
        form = forms.importForm(formdata=request.form if request.method == 'POST' else None)
        output = render_template("import.html",
                                form=form,
                                import_url_list_remaining="\n".join(remaining_urls),
                                original_distill_json=''
                                )
        return output

    return import_blueprint
| @@ -1,6 +1,5 @@ | ||||
| from abc import ABC, abstractmethod | ||||
| from abc import abstractmethod | ||||
| import time | ||||
| import validators | ||||
| from wtforms import ValidationError | ||||
| from loguru import logger | ||||
| 
 | ||||
| @@ -241,7 +240,7 @@ class import_xlsx_custom(Importer): | ||||
|             return | ||||
| 
 | ||||
|         # @todo check at least 2 rows, same in other method | ||||
|         from .forms import validate_url | ||||
|         from changedetectionio.forms import validate_url | ||||
|         row_i = 1 | ||||
| 
 | ||||
|         try: | ||||
| @@ -300,4 +299,4 @@ class import_xlsx_custom(Importer): | ||||
|             row_i += 1 | ||||
| 
 | ||||
|         flash( | ||||
|             "{} imported from custom .xlsx in {:.2f}s".format(len(self.new_uuids), time.time() - now)) | ||||
|             "{} imported from custom .xlsx in {:.2f}s".format(len(self.new_uuids), time.time() - now)) | ||||
| @@ -13,29 +13,27 @@ | ||||
|     </div> | ||||
| 
 | ||||
|     <div class="box-wrap inner"> | ||||
|         <form class="pure-form" action="{{url_for('import_page')}}" method="POST" enctype="multipart/form-data"> | ||||
|         <form class="pure-form" action="{{url_for('imports.import_page')}}" method="POST" enctype="multipart/form-data"> | ||||
|             <input type="hidden" name="csrf_token" value="{{ csrf_token() }}"> | ||||
|             <div class="tab-pane-inner" id="url-list"> | ||||
|                     <legend> | ||||
|                 <div class="pure-control-group"> | ||||
|                         Enter one URL per line, and optionally add tags for each URL after a space, delineated by comma | ||||
|                         (,): | ||||
|                         <br> | ||||
|                         <code>https://example.com tag1, tag2, last tag</code> | ||||
|                         <br> | ||||
|                         <p><strong>Example:  </strong><code>https://example.com tag1, tag2, last tag</code></p> | ||||
|                         URLs which do not pass validation will stay in the textarea. | ||||
|                     </legend> | ||||
|                 </div> | ||||
|                 {{ render_field(form.processor, class="processor") }} | ||||
| 
 | ||||
|                  | ||||
|                 <div class="pure-control-group"> | ||||
|                     <textarea name="urls" class="pure-input-1-2" placeholder="https://" | ||||
|                               style="width: 100%; | ||||
|                                 font-family:monospace; | ||||
|                                 white-space: pre; | ||||
|                                 overflow-wrap: normal; | ||||
|                                 overflow-x: scroll;" rows="25">{{ import_url_list_remaining }}</textarea> | ||||
| 
 | ||||
| <div id="quick-watch-processor-type"> | ||||
| 
 | ||||
|                     </div> | ||||
|                  </div> | ||||
|                  <div id="quick-watch-processor-type"></div> | ||||
| 
 | ||||
|             </div> | ||||
| 
 | ||||
| @@ -43,7 +41,7 @@ | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
|                     <legend> | ||||
|                     <div class="pure-control-group"> | ||||
|                         Copy and Paste your Distill.io watch 'export' file, this should be a JSON file.<br> | ||||
|                         This is <i>experimental</i>, supported fields are <code>name</code>, <code>uri</code>, <code>tags</code>, <code>config:selections</code>, the rest (including <code>schedule</code>) are ignored. | ||||
|                         <br> | ||||
| @@ -51,7 +49,7 @@ | ||||
|                         How to export? <a href="https://distill.io/docs/web-monitor/how-export-and-import-monitors/">https://distill.io/docs/web-monitor/how-export-and-import-monitors/</a><br> | ||||
|                         Be sure to set your default fetcher to Chrome if required.<br> | ||||
|                         </p> | ||||
|                     </legend> | ||||
|                     </div> | ||||
| 
 | ||||
| 
 | ||||
|                     <textarea name="distill-io" class="pure-input-1-2" style="width: 100%; | ||||
| @@ -122,4 +120,4 @@ | ||||
|     </div> | ||||
| </div> | ||||
| 
 | ||||
| {% endblock %} | ||||
| {% endblock %} | ||||
| @@ -19,14 +19,14 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q: PriorityQueue | ||||
|         datastore.data['watching'][uuid]['track_ldjson_price_data'] = PRICE_DATA_TRACK_ACCEPT | ||||
|         datastore.data['watching'][uuid]['processor'] = 'restock_diff' | ||||
|         datastore.data['watching'][uuid].clear_watch() | ||||
|         update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False})) | ||||
|         return redirect(url_for("index")) | ||||
|         update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|         return redirect(url_for("watchlist.index")) | ||||
|  | ||||
|     @login_required | ||||
|     @price_data_follower_blueprint.route("/<string:uuid>/reject", methods=['GET']) | ||||
|     def reject(uuid): | ||||
|         datastore.data['watching'][uuid]['track_ldjson_price_data'] = PRICE_DATA_TRACK_REJECT | ||||
|         return redirect(url_for("index")) | ||||
|         return redirect(url_for("watchlist.index")) | ||||
|  | ||||
|  | ||||
|     return price_data_follower_blueprint | ||||
|   | ||||
							
								
								
									
										1
									
								
								changedetectionio/blueprint/rss/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1 @@ | ||||
# (value, label) choices for the RSS content-format setting dropdown:
# plain text diff output, or HTML with coloured change markup.
RSS_FORMAT_TYPES = [('plaintext', 'Plain text'), ('html', 'HTML Color')]
							
								
								
									
										147
									
								
								changedetectionio/blueprint/rss/blueprint.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,147 @@ | ||||
|  | ||||
| from changedetectionio.safe_jinja import render as jinja_render | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from feedgen.feed import FeedGenerator | ||||
| from flask import Blueprint, make_response, request, url_for, redirect | ||||
| from loguru import logger | ||||
| import datetime | ||||
| import pytz | ||||
| import re | ||||
| import time | ||||
|  | ||||
|  | ||||
# Control characters (other than tab/newline/CR) are invalid in XML 1.0 and
# would make feedgen fail when serialising the feed.
BAD_CHARS_REGEX = r'[\x00-\x08\x0B\x0C\x0E-\x1F]'

# Anything that is not text/UTF-8 should be stripped before it breaks feedgen (such as binary data etc)
def scan_invalid_chars_in_rss(content):
    """Return True if *content* contains a character that would break the RSS XML.

    Only the first offending character is reported; it is logged together with
    a little surrounding context so the position can be located in the snapshot.
    """
    match = re.search(BAD_CHARS_REGEX, content)
    if match is None:
        return False

    pos = match.start()
    bad_char = content[pos]
    hex_value = f"0x{ord(bad_char):02x}"
    # Grab context around the offending character for the log message
    snippet = content[max(0, pos - 20):min(len(content), pos + 21)]
    snippet = snippet.replace('\n', '\\n').replace('\r', '\\r')
    logger.warning(f"Invalid char {hex_value} at pos {pos}: ...{snippet}...")
    # First match is enough
    return True


def clean_entry_content(content):
    """Return *content* with all XML-invalid control characters removed."""
    return re.sub(BAD_CHARS_REGEX, '', content)
|  | ||||
def construct_blueprint(datastore: ChangeDetectionStore):
    """Build the RSS feed blueprint.

    Exposes the watch-change feed at the blueprint root.  Access is guarded by
    the application-level RSS token (``?token=``) rather than the usual login
    decorator, so feed readers can fetch it without a session.
    """
    rss_blueprint = Blueprint('rss', __name__)

    # Some RSS reader situations ended up with rss/ (forward slash after RSS) due
    # to some earlier blueprint rerouting work, it should goto feed.
    @rss_blueprint.route("/", methods=['GET'])
    def extraslash():
        return redirect(url_for('rss.feed'))

    # Import the login decorator if needed
    # from changedetectionio.auth_decorator import login_optionally_required
    @rss_blueprint.route("", methods=['GET'])
    def feed():
        now = time.time()
        # Always requires token set
        app_rss_token = datastore.data['settings']['application'].get('rss_access_token')
        rss_url_token = request.args.get('token')
        if rss_url_token != app_rss_token:
            return "Access denied, bad token", 403

        from changedetectionio import diff
        limit_tag = request.args.get('tag', '').lower().strip()
        # ?tag= may be a tag *title*; translate it to the tag UUID stored in watch['tags']
        for uuid, tag in datastore.data['settings']['application'].get('tags', {}).items():
            if limit_tag == tag.get('title', '').lower().strip():
                limit_tag = uuid

        # Sort by last_changed and add the uuid which is usually the key..
        sorted_watches = []

        # @todo needs a .itemsWithTag() or something - then we can use that in Jinja2 and throw this away
        for uuid, watch in datastore.data['watching'].items():
            # @todo tag notification_muted skip also (improve Watch model)
            if datastore.data['settings']['application'].get('rss_hide_muted_watches') and watch.get('notification_muted'):
                continue
            if limit_tag and limit_tag not in watch['tags']:
                continue
            watch['uuid'] = uuid
            sorted_watches.append(watch)

        sorted_watches.sort(key=lambda x: x.last_changed, reverse=False)

        fg = FeedGenerator()
        fg.title('changedetection.io')
        fg.description('Feed description')
        fg.link(href='https://changedetection.io')

        # 'html' content format renders the diff with coloured markup, else plain text
        html_colour_enable = datastore.data['settings']['application'].get('rss_content_format') == 'html'

        for watch in sorted_watches:

            dates = list(watch.history.keys())
            # Re #521 - Don't bother processing this one if there are less than 2 snapshots, means we never had a change detected.
            if len(dates) < 2:
                continue

            if not watch.viewed:
                # Re #239 - GUID needs to be individual for each event
                # @todo In the future make this a configurable link back (see work on BASE_URL https://github.com/dgtlmoon/changedetection.io/pull/228)
                guid = "{}/{}".format(watch['uuid'], watch.last_changed)
                fe = fg.add_entry()

                # Include a link to the diff page, they will have to login here to see if password protection is enabled.
                # Description is the page you watch, link takes you to the diff JS UI page.
                # Because we are called via whatever web server, flask should figure out the right path.
                diff_link = {'href': url_for('ui.ui_views.diff_history_page', uuid=watch['uuid'], _external=True)}

                fe.link(link=diff_link)

                # @todo watch should be a getter - watch.get('title') (internally if URL else..)

                watch_title = watch.get('title') if watch.get('title') else watch.get('url')
                fe.title(title=watch_title)
                try:

                    html_diff = diff.render_diff(previous_version_file_contents=watch.get_history_snapshot(dates[-2]),
                                                 newest_version_file_contents=watch.get_history_snapshot(dates[-1]),
                                                 include_equal=False,
                                                 line_feed_sep="<br>",
                                                 html_colour=html_colour_enable
                                                 )
                except FileNotFoundError:
                    # Snapshot file may have been pruned; fall back to an explanatory entry body
                    html_diff = f"History snapshot file for watch {watch.get('uuid')}@{watch.last_changed} - '{watch.get('title')}' not found."

                # @todo Make this configurable and also consider html-colored markup
                # @todo User could decide if <link> goes to the diff page, or to the watch link
                rss_template = "<html><body>\n<h4><a href=\"{{watch_url}}\">{{watch_title}}</a></h4>\n<p>{{html_diff}}</p>\n</body></html>\n"

                content = jinja_render(template_str=rss_template, watch_title=watch_title, html_diff=html_diff, watch_url=watch.link)

                # Out of range chars could also break feedgen
                if scan_invalid_chars_in_rss(content):
                    content = clean_entry_content(content)

                fe.content(content=content, type='CDATA')
                fe.guid(guid, permalink=False)
                dt = datetime.datetime.fromtimestamp(int(watch.newest_history_key))
                dt = dt.replace(tzinfo=pytz.UTC)
                fe.pubDate(dt)

        response = make_response(fg.rss_str())
        response.headers.set('Content-Type', 'application/rss+xml;charset=utf-8')
        logger.trace(f"RSS generated in {time.time() - now:.3f}s")
        return response

    return rss_blueprint
							
								
								
									
										120
									
								
								changedetectionio/blueprint/settings/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,120 @@ | ||||
| import os | ||||
| from copy import deepcopy | ||||
| from datetime import datetime | ||||
| from zoneinfo import ZoneInfo, available_timezones | ||||
| import secrets | ||||
| import flask_login | ||||
| from flask import Blueprint, render_template, request, redirect, url_for, flash | ||||
|  | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio.auth_decorator import login_optionally_required | ||||
|  | ||||
|  | ||||
def construct_blueprint(datastore: ChangeDetectionStore):
    """Build the global settings blueprint.

    Routes:
      ``''``                  - view/update application and request settings
      ``/reset-api-key``      - regenerate the API access token
      ``/notification-logs``  - show the in-memory notification debug log
    """
    settings_blueprint = Blueprint('settings', __name__, template_folder="templates")

    @settings_blueprint.route("", methods=['GET', "POST"])
    @login_optionally_required
    def settings_page():
        from changedetectionio import forms

        # Work on a copy so form defaults never touch the live settings until validated
        default = deepcopy(datastore.data['settings'])
        if datastore.proxy_list is not None:
            available_proxies = list(datastore.proxy_list.keys())
            # Guard against an empty proxy list so available_proxies[0] can't IndexError
            if available_proxies:
                # When enabled
                system_proxy = datastore.data['settings']['requests']['proxy']
                # In the case it doesn't exist anymore
                if system_proxy not in available_proxies:
                    system_proxy = None

                default['requests']['proxy'] = system_proxy if system_proxy is not None else available_proxies[0]
                # Used by the form handler to keep or remove the proxy settings
                default['proxy_list'] = available_proxies[0]

        # Don't use form.data on POST so that it doesn't override the checkbox status from the POST status
        form = forms.globalSettingsForm(formdata=request.form if request.method == 'POST' else None,
                                        data=default,
                                        extra_notification_tokens=datastore.get_unique_notification_tokens_available()
                                        )

        # Remove the last option 'System default'
        form.application.form.notification_format.choices.pop()

        if datastore.proxy_list is None:
            # @todo - Couldn't get setattr() etc dynamic addition working, so remove it instead
            del form.requests.form.proxy
        else:
            form.requests.form.proxy.choices = []
            for p in datastore.proxy_list:
                form.requests.form.proxy.choices.append(tuple((p, datastore.proxy_list[p]['label'])))

        if request.method == 'POST':
            # Password unset is a GET, but we can lock the session to a salted env password to always need the password
            if form.application.form.data.get('removepassword_button', False):
                # SALTED_PASS means the password is "locked" to what we set in the Env var
                if not os.getenv("SALTED_PASS", False):
                    datastore.remove_password()
                    flash("Password protection removed.", 'notice')
                    flask_login.logout_user()
                    return redirect(url_for('settings.settings_page'))

            if form.validate():
                # Don't set password to False when a password is set - should be only removed with the `removepassword` button
                app_update = dict(deepcopy(form.data['application']))

                # Never update password with '' or False (Added by wtforms when not in submission)
                if 'password' in app_update and not app_update['password']:
                    del (app_update['password'])

                datastore.data['settings']['application'].update(app_update)
                datastore.data['settings']['requests'].update(form.data['requests'])

                # Enabling a password logs the current session out so it must re-authenticate
                if not os.getenv("SALTED_PASS", False) and len(form.application.form.password.encrypted_password):
                    datastore.data['settings']['application']['password'] = form.application.form.password.encrypted_password
                    datastore.needs_write_urgent = True
                    flash("Password protection enabled.", 'notice')
                    flask_login.logout_user()
                    return redirect(url_for('watchlist.index'))

                datastore.needs_write_urgent = True
                flash("Settings updated.")

            else:
                flash("An error occurred, please see below.", "error")

        # Convert to ISO 8601 format, all date/time relative events stored as UTC time
        utc_time = datetime.now(ZoneInfo("UTC")).isoformat()

        output = render_template("settings.html",
                                api_key=datastore.data['settings']['application'].get('api_access_token'),
                                available_timezones=sorted(available_timezones()),
                                emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
                                extra_notification_token_placeholder_info=datastore.get_unique_notification_token_placeholders_available(),
                                form=form,
                                hide_remove_pass=os.getenv("SALTED_PASS", False),
                                min_system_recheck_seconds=int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 3)),
                                settings_application=datastore.data['settings']['application'],
                                timezone_default_config=datastore.data['settings']['application'].get('timezone'),
                                utc_time=utc_time,
                                )

        return output

    @settings_blueprint.route("/reset-api-key", methods=['GET'])
    @login_optionally_required
    def settings_reset_api_key():
        # Regenerate and persist the API access token, then jump back to the API tab
        secret = secrets.token_hex(16)
        datastore.data['settings']['application']['api_access_token'] = secret
        datastore.needs_write_urgent = True
        flash("API Key was regenerated.")
        return redirect(url_for('settings.settings_page')+'#api')

    @settings_blueprint.route("/notification-logs", methods=['GET'])
    @login_optionally_required
    def notification_logs():
        from changedetectionio.flask_app import notification_debug_log
        output = render_template("notification-log.html",
                               logs=notification_debug_log if len(notification_debug_log) else ["Notification logs are empty - no notifications sent yet."])
        return output

    return settings_blueprint
| @@ -1,18 +1,20 @@ | ||||
| {% extends 'base.html' %} | ||||
| 
 | ||||
| {% block content %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form %} | ||||
| {% from '_common_fields.html' import render_common_settings_form %} | ||||
| <script> | ||||
|     const notification_base_url="{{url_for('ajax_callback_send_notification_test', mode="global-settings")}}"; | ||||
|     const notification_base_url="{{url_for('ui.ui_notification.ajax_callback_send_notification_test', mode="global-settings")}}"; | ||||
| {% if emailprefix %} | ||||
|     const email_notification_prefix=JSON.parse('{{emailprefix|tojson}}'); | ||||
| {% endif %} | ||||
| </script> | ||||
| <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='plugins.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='global-settings.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='scheduler.js')}}" defer></script> | ||||
| <div class="edit-form"> | ||||
|     <div class="tabs collapsable"> | ||||
|         <ul> | ||||
| @@ -21,17 +23,24 @@ | ||||
|             <li class="tab"><a href="#fetching">Fetching</a></li> | ||||
|             <li class="tab"><a href="#filters">Global Filters</a></li> | ||||
|             <li class="tab"><a href="#api">API</a></li> | ||||
|             <li class="tab"><a href="#timedate">Time & Date</a></li> | ||||
|             <li class="tab"><a href="#proxies">CAPTCHA & Proxies</a></li> | ||||
|         </ul> | ||||
|     </div> | ||||
|     <div class="box-wrap inner"> | ||||
|         <form class="pure-form pure-form-stacked settings" action="{{url_for('settings_page')}}" method="POST"> | ||||
|         <form class="pure-form pure-form-stacked settings" action="{{url_for('settings.settings_page')}}" method="POST"> | ||||
|             <input type="hidden" name="csrf_token" value="{{ csrf_token() }}" > | ||||
|             <div class="tab-pane-inner" id="general"> | ||||
|                 <fieldset> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.requests.form.time_between_check, class="time-check-widget") }} | ||||
|                         <span class="pure-form-message-inline">Default recheck time for all watches, current system minimum is <i>{{min_system_recheck_seconds}}</i> seconds (<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Misc-system-settings#enviroment-variables">more info</a>).</span> | ||||
|                             <div id="time-between-check-schedule"> | ||||
|                                 <!-- Start Time and End Time --> | ||||
|                                 <div id="limit-between-time"> | ||||
|                                     {{ render_time_schedule_form(form.requests, available_timezones, timezone_default_config) }} | ||||
|                                 </div> | ||||
|                         </div> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.requests.form.jitter_seconds, class="jitter_seconds") }} | ||||
| @@ -69,7 +78,10 @@ | ||||
|                         {{ render_field(form.application.form.pager_size) }} | ||||
|                         <span class="pure-form-message-inline">Number of items per page in the watch overview list, 0 to disable.</span> | ||||
|                     </div> | ||||
| 
 | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.application.form.rss_content_format) }} | ||||
|                         <span class="pure-form-message-inline">Love RSS? Does your reader support HTML? Set it here</span> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_checkbox_field(form.application.form.extract_title_as_title) }} | ||||
|                         <span class="pure-form-message-inline">Note: This will automatically apply to all existing watches.</span> | ||||
| @@ -172,11 +184,11 @@ nav | ||||
|                     <span class="pure-form-message-inline">Note: This is applied globally in addition to the per-watch rules.</span><br> | ||||
|                     <span class="pure-form-message-inline"> | ||||
|                         <ul> | ||||
|                             <li>Matching text will be <strong>ignored</strong> in the text snapshot (you can still see it but it wont trigger a change)</li> | ||||
|                             <li>Note: This is applied globally in addition to the per-watch rules.</li> | ||||
|                             <li>Each line processed separately, any line matching will be ignored (removed before creating the checksum)</li> | ||||
|                             <li>Regular Expression support, wrap the entire line in forward slash <code>/regex/</code></li> | ||||
|                             <li>Changing this will affect the comparison checksum which may trigger an alert</li> | ||||
|                             <li>Use the preview/show current tab to see ignores</li> | ||||
|                         </ul> | ||||
|                      </span> | ||||
|                     </fieldset> | ||||
| @@ -194,7 +206,7 @@ nav | ||||
|                     </div> | ||||
|                 </div> | ||||
|                 <div class="pure-control-group"> | ||||
|                     <a href="{{url_for('settings_reset_api_key')}}" class="pure-button button-small button-cancel">Regenerate API key</a> | ||||
|                     <a href="{{url_for('settings.settings_reset_api_key')}}" class="pure-button button-small button-cancel">Regenerate API key</a> | ||||
|                 </div> | ||||
|                 <div class="pure-control-group"> | ||||
|                     <h4>Chrome Extension</h4> | ||||
| @@ -205,12 +217,29 @@ nav | ||||
|                         <a id="chrome-extension-link" | ||||
|                            title="Try our new Chrome Extension!" | ||||
|                            href="https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop"> | ||||
|                             <img src="{{ url_for('static_content', group='images', filename='Google-Chrome-icon.png') }}" alt="Chrome"> | ||||
|                             <img alt="Chrome store icon" src="{{ url_for('static_content', group='images', filename='google-chrome-icon.png') }}"> | ||||
|                             Chrome Webstore | ||||
|                         </a> | ||||
|                     </p> | ||||
|                 </div> | ||||
|             </div> | ||||
|             <div class="tab-pane-inner" id="timedate"> | ||||
|                 <div class="pure-control-group"> | ||||
|                     Ensure the settings below are correct, they are used to manage the time schedule for checking your web page watches. | ||||
|                 </div> | ||||
|                 <div class="pure-control-group"> | ||||
|                     <p><strong>UTC Time & Date from Server:</strong> <span id="utc-time" >{{ utc_time }}</span></p> | ||||
|                     <p><strong>Local Time & Date in Browser:</strong> <span class="local-time" data-utc="{{ utc_time }}"></span></p> | ||||
|                     <p> | ||||
|                        {{ render_field(form.application.form.timezone) }} | ||||
|                         <datalist id="timezones" style="display: none;"> | ||||
|                             {% for tz_name in available_timezones %} | ||||
|                                 <option value="{{ tz_name }}">{{ tz_name }}</option> | ||||
|                             {% endfor %} | ||||
|                         </datalist> | ||||
|                     </p> | ||||
|                 </div> | ||||
|             </div> | ||||
|             <div class="tab-pane-inner" id="proxies"> | ||||
|                 <div id="recommended-proxy"> | ||||
|                     <div> | ||||
| @@ -254,9 +283,7 @@ nav | ||||
|                          | ||||
|                     </div> | ||||
|                 </div> | ||||
|                 <p> | ||||
|                     Your proxy provider may need to whitelist our IP of <code>204.15.192.195</code> | ||||
|                 </p> | ||||
| 
 | ||||
|                <p><strong>Tip</strong>: "Residential" and "Mobile" proxy type can be more successful than "Data Center" for blocked websites. | ||||
| 
 | ||||
|                 <div class="pure-control-group" id="extra-proxies-setting"> | ||||
| @@ -275,8 +302,8 @@ nav | ||||
|             <div id="actions"> | ||||
|                 <div class="pure-control-group"> | ||||
|                     {{ render_button(form.save_button) }} | ||||
|                     <a href="{{url_for('index')}}" class="pure-button button-small button-cancel">Back</a> | ||||
|                     <a href="{{url_for('clear_all_history')}}" class="pure-button button-small button-cancel">Clear Snapshot History</a> | ||||
|                     <a href="{{url_for('watchlist.index')}}" class="pure-button button-small button-cancel">Back</a> | ||||
|                     <a href="{{url_for('ui.clear_all_history')}}" class="pure-button button-small button-error">Clear Snapshot History</a> | ||||
|                 </div> | ||||
|             </div> | ||||
|         </form> | ||||
| @@ -13,6 +13,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|     def tags_overview_page(): | ||||
|         from .form import SingleTag | ||||
|         add_form = SingleTag(request.form) | ||||
|  | ||||
|         sorted_tags = sorted(datastore.data['settings']['application'].get('tags').items(), key=lambda x: x[1]['title']) | ||||
|  | ||||
|         from collections import Counter | ||||
| @@ -104,9 +105,11 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|  | ||||
|         default = datastore.data['settings']['application']['tags'].get(uuid) | ||||
|  | ||||
|         form = group_restock_settings_form(formdata=request.form if request.method == 'POST' else None, | ||||
|         form = group_restock_settings_form( | ||||
|                                        formdata=request.form if request.method == 'POST' else None, | ||||
|                                        data=default, | ||||
|                                        extra_notification_tokens=datastore.get_unique_notification_tokens_available() | ||||
|                                        extra_notification_tokens=datastore.get_unique_notification_tokens_available(), | ||||
|                                        default_system_settings = datastore.data['settings'], | ||||
|                                        ) | ||||
|  | ||||
|         template_args = { | ||||
|   | ||||
| @@ -3,7 +3,7 @@ | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button %} | ||||
| {% from '_common_fields.html' import render_common_settings_form %} | ||||
| <script> | ||||
|     const notification_base_url="{{url_for('ajax_callback_send_notification_test', mode="group-settings")}}"; | ||||
|     const notification_base_url="{{url_for('ui.ui_notification.ajax_callback_send_notification_test', mode="group-settings")}}"; | ||||
| </script> | ||||
|  | ||||
| <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script> | ||||
| @@ -17,7 +17,6 @@ | ||||
| </script> | ||||
|  | ||||
| <script src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script> | ||||
| <!--<script src="{{url_for('static_content', group='js', filename='limit.js')}}" defer></script>--> | ||||
| <script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script> | ||||
|  | ||||
| <div class="edit-form monospaced-textarea"> | ||||
| @@ -125,7 +124,7 @@ nav | ||||
|                         {% if has_default_notification_urls %} | ||||
|                         <div class="inline-warning"> | ||||
|                             <img class="inline-warning-icon" src="{{url_for('static_content', group='images', filename='notice.svg')}}" alt="Look out!" title="Lookout!" > | ||||
|                             There are <a href="{{ url_for('settings_page')}}#notifications">system-wide notification URLs enabled</a>, this form will override notification settings for this watch only ‐ an empty Notification URL list here will still send notifications. | ||||
|                             There are <a href="{{ url_for('settings.settings_page')}}#notifications">system-wide notification URLs enabled</a>, this form will override notification settings for this watch only ‐ an empty Notification URL list here will still send notifications. | ||||
|                         </div> | ||||
|                         {% endif %} | ||||
|                         <a href="#notifications" id="notification-setting-reset-to-default" class="pure-button button-xsmall" style="right: 20px; top: 20px; position: absolute; background-color: #5f42dd; border-radius: 4px; font-size: 70%; color: #fff">Use system defaults</a> | ||||
|   | ||||
| @@ -47,7 +47,7 @@ | ||||
|                     <a class="link-mute state-{{'on' if tag.notification_muted else 'off'}}" href="{{url_for('tags.mute', uuid=tag.uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications" class="icon icon-mute" ></a> | ||||
|                 </td> | ||||
|                 <td>{{ "{:,}".format(tag_count[uuid]) if uuid in tag_count else 0 }}</td> | ||||
|                 <td class="title-col inline"> <a href="{{url_for('index', tag=uuid) }}">{{ tag.title }}</a></td> | ||||
|                 <td class="title-col inline"> <a href="{{url_for('watchlist.index', tag=uuid) }}">{{ tag.title }}</a></td> | ||||
|                 <td> | ||||
|                     <a class="pure-button pure-button-primary" href="{{ url_for('tags.form_tag_edit', uuid=uuid) }}">Edit</a>  | ||||
|                     <a class="pure-button pure-button-primary" href="{{ url_for('tags.delete', uuid=uuid) }}" title="Deletes and removes tag">Delete</a> | ||||
|   | ||||
							
								
								
									
										302
									
								
								changedetectionio/blueprint/ui/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,302 @@ | ||||
| import time | ||||
| from flask import Blueprint, request, redirect, url_for, flash, render_template, session | ||||
| from loguru import logger | ||||
| from functools import wraps | ||||
|  | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio.blueprint.ui.edit import construct_blueprint as construct_edit_blueprint | ||||
| from changedetectionio.blueprint.ui.notification import construct_blueprint as construct_notification_blueprint | ||||
| from changedetectionio.blueprint.ui.views import construct_blueprint as construct_views_blueprint | ||||
|  | ||||
def construct_blueprint(datastore: ChangeDetectionStore, update_q, running_update_threads, queuedWatchMetaData):
    """Assemble the main 'ui' blueprint: watch management routes plus nested sub-blueprints.

    :param datastore: shared ChangeDetectionStore holding all watches and settings.
    :param update_q: priority queue consumed by the background update workers.
    :param running_update_threads: live worker threads; used to avoid re-queuing a watch
                                   that is currently being checked.
    :param queuedWatchMetaData: module providing PrioritizedItem wrappers for queue entries.
    :return: the configured Flask Blueprint.
    """
    ui_blueprint = Blueprint('ui', __name__, template_folder="templates")

    # Register the edit blueprint (watch edit form)
    edit_blueprint = construct_edit_blueprint(datastore, update_q, queuedWatchMetaData)
    ui_blueprint.register_blueprint(edit_blueprint)

    # Register the notification blueprint (test-notification AJAX etc.)
    notification_blueprint = construct_notification_blueprint(datastore)
    ui_blueprint.register_blueprint(notification_blueprint)

    # Register the views blueprint (preview/diff pages)
    views_blueprint = construct_views_blueprint(datastore, update_q, queuedWatchMetaData)
    ui_blueprint.register_blueprint(views_blueprint)

    # Import the login decorator
    from changedetectionio.auth_decorator import login_optionally_required

    @ui_blueprint.route("/clear_history/<string:uuid>", methods=['GET'])
    @login_optionally_required
    def clear_watch_history(uuid):
        """Wipe the snapshot history of a single watch, then return to the watch list."""
        try:
            datastore.clear_watch_history(uuid)
        except KeyError:
            flash('Watch not found', 'error')
        else:
            flash("Cleared snapshot history for watch {}".format(uuid))

        return redirect(url_for('watchlist.index'))

    @ui_blueprint.route("/clear_history", methods=['GET', 'POST'])
    @login_optionally_required
    def clear_all_history():
        """Show (GET) or execute (POST, requires typed 'clear' confirmation) a full history wipe."""
        if request.method == 'POST':
            confirmtext = request.form.get('confirmtext')

            if confirmtext == 'clear':
                for uuid in datastore.data['watching'].keys():
                    datastore.clear_watch_history(uuid)

                flash("Cleared snapshot history for all watches")
            else:
                flash('Incorrect confirmation text.', 'error')

            return redirect(url_for('watchlist.index'))

        return render_template("clear_all_history.html")

    # Clear all statuses, so we do not see the 'unviewed' class
    @ui_blueprint.route("/form/mark-all-viewed", methods=['GET'])
    @login_optionally_required
    def mark_all_viewed():
        """Record 'now' as the last-viewed time for every watch (or only errored ones)."""
        # '?with_errors=1' restricts the operation to watches that have a recorded error
        with_errors = request.args.get('with_errors') == "1"
        for watch_uuid, watch in datastore.data['watching'].items():
            if with_errors and not watch.get('last_error'):
                continue
            datastore.set_last_viewed(watch_uuid, int(time.time()))

        return redirect(url_for('watchlist.index'))

    @ui_blueprint.route("/delete", methods=['GET'])
    @login_optionally_required
    def form_delete():
        """Delete one watch by '?uuid=', or every watch when uuid == 'all'."""
        uuid = request.args.get('uuid')

        # More for testing, possible to return the first/only.
        # Resolved BEFORE the existence check - previously 'first' could never
        # pass validation because no watch actually has the UUID 'first'.
        if uuid == 'first':
            uuid = list(datastore.data['watching'].keys()).pop()

        if uuid != 'all' and uuid not in datastore.data['watching'].keys():
            flash('The watch by UUID {} does not exist.'.format(uuid), 'error')
            return redirect(url_for('watchlist.index'))

        datastore.delete(uuid)
        flash('Deleted.')

        return redirect(url_for('watchlist.index'))

    @ui_blueprint.route("/clone", methods=['GET'])
    @login_optionally_required
    def form_clone():
        """Duplicate a watch and queue an immediate check unless the source is paused."""
        uuid = request.args.get('uuid')
        # More for testing, possible to return the first/only
        if uuid == 'first':
            uuid = list(datastore.data['watching'].keys()).pop()

        new_uuid = datastore.clone(uuid)

        if not datastore.data['watching'].get(uuid).get('paused'):
            update_q.put(queuedWatchMetaData.PrioritizedItem(priority=5, item={'uuid': new_uuid}))

        flash('Cloned, you are editing the new watch.')

        return redirect(url_for("ui.ui_edit.edit_page", uuid=new_uuid))

    @ui_blueprint.route("/checknow", methods=['GET'])
    @login_optionally_required
    def form_watch_checknow():
        """Queue one watch ('?uuid='), or all eligible watches, for an immediate recheck."""
        # Forced recheck will skip the 'skip if content is the same' rule (, 'reprocess_existing_data': True})))
        tag = request.args.get('tag')
        uuid = request.args.get('uuid')
        with_errors = request.args.get('with_errors') == "1"

        queued_count = 0
        # Skip anything a worker thread is already processing
        running_uuids = [t.current_uuid for t in running_update_threads]

        if uuid:
            if uuid not in running_uuids:
                update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))
                queued_count += 1

        else:
            # Recheck all, including muted (but never paused or already-running) watches
            for watch_uuid, watch in datastore.data['watching'].items():
                if watch['paused'] or watch_uuid in running_uuids:
                    continue
                if with_errors and not watch.get('last_error'):
                    continue
                if tag is not None and tag not in watch['tags']:
                    continue

                update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid}))
                queued_count += 1

        if queued_count == 1:
            flash("Queued 1 watch for rechecking.")
        if queued_count > 1:
            flash("Queued {} watches for rechecking.".format(queued_count))
        if queued_count == 0:
            flash("No watches available to recheck.")

        return redirect(url_for('watchlist.index'))

    @ui_blueprint.route("/form/checkbox-operations", methods=['POST'])
    @login_optionally_required
    def form_watch_list_checkbox_operations():
        """Apply a bulk operation ('op') to the selected watch UUIDs ('uuids')."""
        op = request.form['op']
        # Strip once up-front rather than re-stripping inside every branch
        uuids = [u.strip() for u in request.form.getlist('uuids')]

        if op == 'delete':
            for uuid in uuids:
                if datastore.data['watching'].get(uuid):
                    datastore.delete(uuid)
            flash("{} watches deleted".format(len(uuids)))

        elif op == 'pause':
            for uuid in uuids:
                if datastore.data['watching'].get(uuid):
                    datastore.data['watching'][uuid]['paused'] = True
            flash("{} watches paused".format(len(uuids)))

        elif op == 'unpause':
            for uuid in uuids:
                if datastore.data['watching'].get(uuid):
                    datastore.data['watching'][uuid]['paused'] = False
            flash("{} watches unpaused".format(len(uuids)))

        elif op == 'mark-viewed':
            for uuid in uuids:
                if datastore.data['watching'].get(uuid):
                    datastore.set_last_viewed(uuid, int(time.time()))
            flash("{} watches updated".format(len(uuids)))

        elif op == 'mute':
            for uuid in uuids:
                if datastore.data['watching'].get(uuid):
                    datastore.data['watching'][uuid]['notification_muted'] = True
            flash("{} watches muted".format(len(uuids)))

        elif op == 'unmute':
            for uuid in uuids:
                if datastore.data['watching'].get(uuid):
                    datastore.data['watching'][uuid]['notification_muted'] = False
            flash("{} watches un-muted".format(len(uuids)))

        elif op == 'recheck':
            for uuid in uuids:
                if datastore.data['watching'].get(uuid):
                    # Recheck and require a full reprocessing
                    update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid}))
            flash("{} watches queued for rechecking".format(len(uuids)))

        elif op == 'clear-errors':
            for uuid in uuids:
                if datastore.data['watching'].get(uuid):
                    datastore.data['watching'][uuid]["last_error"] = False
            flash(f"{len(uuids)} watches errors cleared")

        elif op == 'clear-history':
            for uuid in uuids:
                if datastore.data['watching'].get(uuid):
                    datastore.clear_watch_history(uuid)
            flash("{} watches cleared/reset.".format(len(uuids)))

        elif op == 'notification-default':
            from changedetectionio.notification import (
                default_notification_format_for_watch
            )
            for uuid in uuids:
                if datastore.data['watching'].get(uuid):
                    datastore.data['watching'][uuid]['notification_title'] = None
                    datastore.data['watching'][uuid]['notification_body'] = None
                    datastore.data['watching'][uuid]['notification_urls'] = []
                    datastore.data['watching'][uuid]['notification_format'] = default_notification_format_for_watch
            flash("{} watches set to use default notification settings".format(len(uuids)))

        elif op == 'assign-tag':
            op_extradata = request.form.get('op_extradata', '').strip()
            if op_extradata:
                tag_uuid = datastore.add_tag(title=op_extradata)
                if tag_uuid:
                    for uuid in uuids:
                        if datastore.data['watching'].get(uuid):
                            # Bug in old versions caused by bad edit page/tag handler
                            if isinstance(datastore.data['watching'][uuid]['tags'], str):
                                datastore.data['watching'][uuid]['tags'] = []

                            datastore.data['watching'][uuid]['tags'].append(tag_uuid)

            flash(f"{len(uuids)} watches were tagged")

        return redirect(url_for('watchlist.index'))


    @ui_blueprint.route("/share-url/<string:uuid>", methods=['GET'])
    @login_optionally_required
    def form_share_put_watch(uuid):
        """Given a watch UUID, upload the info and return a share-link
           the share-link can be imported/added"""
        import requests
        import json
        from copy import deepcopy

        # more for testing
        if uuid == 'first':
            uuid = list(datastore.data['watching'].keys()).pop()

        # copy it to memory as trim off what we dont need (history)
        watch = deepcopy(datastore.data['watching'].get(uuid))
        # For older versions that are not a @property
        if watch.get('history'):
            del watch['history']

        # for safety/privacy - strip all notification settings before upload
        for k in list(watch.keys()):
            if k.startswith('notification_'):
                del watch[k]

        for r in ['uuid', 'last_checked', 'last_changed']:
            if watch.get(r):
                del watch[r]

        # Add the global stuff which may have an impact
        watch['ignore_text'] += datastore.data['settings']['application']['global_ignore_text']
        watch['subtractive_selectors'] += datastore.data['settings']['application']['global_subtractive_selectors']

        watch_json = json.dumps(watch)

        try:
            r = requests.request(method="POST",
                                 data={'watch': watch_json},
                                 url="https://changedetection.io/share/share",
                                 headers={'App-Guid': datastore.data['app_guid']})
            res = r.json()

            # Add to the flask session so the watch list page can show the link
            session['share-link'] = f"https://changedetection.io/share/{res['share_key']}"


        except Exception as e:
            logger.error(f"Error sharing -{str(e)}")
            flash(f"Could not share, something went wrong while communicating with the share server - {str(e)}", 'error')

        return redirect(url_for('watchlist.index'))

    return ui_blueprint
							
								
								
									
										333
									
								
								changedetectionio/blueprint/ui/edit.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,333 @@ | ||||
| import time | ||||
| from copy import deepcopy | ||||
| import os | ||||
| import importlib.resources | ||||
| from flask import Blueprint, request, redirect, url_for, flash, render_template, make_response, send_from_directory, abort | ||||
| from loguru import logger | ||||
| from jinja2 import Environment, FileSystemLoader | ||||
|  | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio.auth_decorator import login_optionally_required | ||||
| from changedetectionio.time_handler import is_within_schedule | ||||
|  | ||||
| def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMetaData): | ||||
|     edit_blueprint = Blueprint('ui_edit', __name__, template_folder="../ui/templates") | ||||
|      | ||||
|     def _watch_has_tag_options_set(watch): | ||||
|         """This should be fixed better so that Tag is some proper Model, a tag is just a Watch also""" | ||||
|         for tag_uuid, tag in datastore.data['settings']['application'].get('tags', {}).items(): | ||||
|             if tag_uuid in watch.get('tags', []) and (tag.get('include_filters') or tag.get('subtractive_selectors')): | ||||
|                 return True | ||||
|  | ||||
|     @edit_blueprint.route("/edit/<string:uuid>", methods=['GET', 'POST']) | ||||
|     @login_optionally_required | ||||
|     # https://stackoverflow.com/questions/42984453/wtforms-populate-form-with-data-if-data-exists | ||||
|     # https://wtforms.readthedocs.io/en/3.0.x/forms/#wtforms.form.Form.populate_obj ? | ||||
|     def edit_page(uuid): | ||||
|         from changedetectionio import forms | ||||
|         from changedetectionio.blueprint.browser_steps.browser_steps import browser_step_ui_config | ||||
|         from changedetectionio import processors | ||||
|         import importlib | ||||
|  | ||||
|         # More for testing, possible to return the first/only | ||||
|         if not datastore.data['watching'].keys(): | ||||
|             flash("No watches to edit", "error") | ||||
|             return redirect(url_for('watchlist.index')) | ||||
|  | ||||
|         if uuid == 'first': | ||||
|             uuid = list(datastore.data['watching'].keys()).pop() | ||||
|  | ||||
|         if not uuid in datastore.data['watching']: | ||||
|             flash("No watch with the UUID %s found." % (uuid), "error") | ||||
|             return redirect(url_for('watchlist.index')) | ||||
|  | ||||
|         switch_processor = request.args.get('switch_processor') | ||||
|         if switch_processor: | ||||
|             for p in processors.available_processors(): | ||||
|                 if p[0] == switch_processor: | ||||
|                     datastore.data['watching'][uuid]['processor'] = switch_processor | ||||
|                     flash(f"Switched to mode - {p[1]}.") | ||||
|                     datastore.clear_watch_history(uuid) | ||||
|                     redirect(url_for('ui_edit.edit_page', uuid=uuid)) | ||||
|  | ||||
|         # be sure we update with a copy instead of accidently editing the live object by reference | ||||
|         default = deepcopy(datastore.data['watching'][uuid]) | ||||
|  | ||||
|         # Defaults for proxy choice | ||||
|         if datastore.proxy_list is not None:  # When enabled | ||||
|             # @todo | ||||
|             # Radio needs '' not None, or incase that the chosen one no longer exists | ||||
|             if default['proxy'] is None or not any(default['proxy'] in tup for tup in datastore.proxy_list): | ||||
|                 default['proxy'] = '' | ||||
|         # proxy_override set to the json/text list of the items | ||||
|  | ||||
|         # Does it use some custom form? does one exist? | ||||
|         processor_name = datastore.data['watching'][uuid].get('processor', '') | ||||
|         processor_classes = next((tpl for tpl in processors.find_processors() if tpl[1] == processor_name), None) | ||||
|         if not processor_classes: | ||||
|             flash(f"Cannot load the edit form for processor/plugin '{processor_classes[1]}', plugin missing?", 'error') | ||||
|             return redirect(url_for('watchlist.index')) | ||||
|  | ||||
|         parent_module = processors.get_parent_module(processor_classes[0]) | ||||
|  | ||||
|         try: | ||||
|             # Get the parent of the "processor.py" go up one, get the form (kinda spaghetti but its reusing existing code) | ||||
|             forms_module = importlib.import_module(f"{parent_module.__name__}.forms") | ||||
|             # Access the 'processor_settings_form' class from the 'forms' module | ||||
|             form_class = getattr(forms_module, 'processor_settings_form') | ||||
|         except ModuleNotFoundError as e: | ||||
|             # .forms didnt exist | ||||
|             form_class = forms.processor_text_json_diff_form | ||||
|         except AttributeError as e: | ||||
|             # .forms exists but no useful form | ||||
|             form_class = forms.processor_text_json_diff_form | ||||
|  | ||||
|         form = form_class(formdata=request.form if request.method == 'POST' else None, | ||||
|                           data=default, | ||||
|                           extra_notification_tokens=default.extra_notification_token_values(), | ||||
|                           default_system_settings=datastore.data['settings'] | ||||
|                           ) | ||||
|  | ||||
|         # For the form widget tag UUID back to "string name" for the field | ||||
|         form.tags.datastore = datastore | ||||
|  | ||||
|         # Used by some forms that need to dig deeper | ||||
|         form.datastore = datastore | ||||
|         form.watch = default | ||||
|  | ||||
|         for p in datastore.extra_browsers: | ||||
|             form.fetch_backend.choices.append(p) | ||||
|  | ||||
|         form.fetch_backend.choices.append(("system", 'System settings default')) | ||||
|  | ||||
|         # form.browser_steps[0] can be assumed that we 'goto url' first | ||||
|  | ||||
|         if datastore.proxy_list is None: | ||||
|             # @todo - Couldn't get setattr() etc dynamic addition working, so remove it instead | ||||
|             del form.proxy | ||||
|         else: | ||||
|             form.proxy.choices = [('', 'Default')] | ||||
|             for p in datastore.proxy_list: | ||||
|                 form.proxy.choices.append(tuple((p, datastore.proxy_list[p]['label']))) | ||||
|  | ||||
|  | ||||
|         if request.method == 'POST' and form.validate(): | ||||
|  | ||||
|             # If they changed processor, it makes sense to reset it. | ||||
|             if datastore.data['watching'][uuid].get('processor') != form.data.get('processor'): | ||||
|                 datastore.data['watching'][uuid].clear_watch() | ||||
|                 flash("Reset watch history due to change of processor") | ||||
|  | ||||
|             extra_update_obj = { | ||||
|                 'consecutive_filter_failures': 0, | ||||
|                 'last_error' : False | ||||
|             } | ||||
|  | ||||
|             if request.args.get('unpause_on_save'): | ||||
|                 extra_update_obj['paused'] = False | ||||
|  | ||||
|             extra_update_obj['time_between_check'] = form.time_between_check.data | ||||
|  | ||||
|              # Ignore text | ||||
|             form_ignore_text = form.ignore_text.data | ||||
|             datastore.data['watching'][uuid]['ignore_text'] = form_ignore_text | ||||
|  | ||||
|             # Be sure proxy value is None | ||||
|             if datastore.proxy_list is not None and form.data['proxy'] == '': | ||||
|                 extra_update_obj['proxy'] = None | ||||
|  | ||||
|             # Unsetting all filter_text methods should make it go back to default | ||||
|             # This particularly affects tests running | ||||
|             if 'filter_text_added' in form.data and not form.data.get('filter_text_added') \ | ||||
|                     and 'filter_text_replaced' in form.data and not form.data.get('filter_text_replaced') \ | ||||
|                     and 'filter_text_removed' in form.data and not form.data.get('filter_text_removed'): | ||||
|                 extra_update_obj['filter_text_added'] = True | ||||
|                 extra_update_obj['filter_text_replaced'] = True | ||||
|                 extra_update_obj['filter_text_removed'] = True | ||||
|  | ||||
|             # Because wtforms doesn't support accessing other data in process_ , but we convert the CSV list of tags back to a list of UUIDs | ||||
|             tag_uuids = [] | ||||
|             if form.data.get('tags'): | ||||
|                 # Sometimes in testing this can be list, dont know why | ||||
|                 if type(form.data.get('tags')) == list: | ||||
|                     extra_update_obj['tags'] = form.data.get('tags') | ||||
|                 else: | ||||
|                     for t in form.data.get('tags').split(','): | ||||
|                         tag_uuids.append(datastore.add_tag(title=t)) | ||||
|                     extra_update_obj['tags'] = tag_uuids | ||||
|  | ||||
|             datastore.data['watching'][uuid].update(form.data) | ||||
|             datastore.data['watching'][uuid].update(extra_update_obj) | ||||
|  | ||||
|             if not datastore.data['watching'][uuid].get('tags'): | ||||
|                 # Force it to be a list, because form.data['tags'] will be string if nothing found | ||||
|                 # And del(form.data['tags'] ) wont work either for some reason | ||||
|                 datastore.data['watching'][uuid]['tags'] = [] | ||||
|  | ||||
|             # Recast it if need be to right data Watch handler | ||||
|             watch_class = processors.get_custom_watch_obj_for_processor(form.data.get('processor')) | ||||
|             datastore.data['watching'][uuid] = watch_class(datastore_path=datastore.datastore_path, default=datastore.data['watching'][uuid]) | ||||
|             flash("Updated watch - unpaused!" if request.args.get('unpause_on_save') else "Updated watch.") | ||||
|  | ||||
|             # Re #286 - We wait for syncing new data to disk in another thread every 60 seconds | ||||
|             # But in the case something is added we should save straight away | ||||
|             datastore.needs_write_urgent = True | ||||
|  | ||||
|             # Do not queue on edit if its not within the time range | ||||
|  | ||||
|             # @todo maybe it should never queue anyway on edit... | ||||
|             is_in_schedule = True | ||||
|             watch = datastore.data['watching'].get(uuid) | ||||
|  | ||||
|             if watch.get('time_between_check_use_default'): | ||||
|                 time_schedule_limit = datastore.data['settings']['requests'].get('time_schedule_limit', {}) | ||||
|             else: | ||||
|                 time_schedule_limit = watch.get('time_schedule_limit') | ||||
|  | ||||
|             tz_name = time_schedule_limit.get('timezone') | ||||
|             if not tz_name: | ||||
|                 tz_name = datastore.data['settings']['application'].get('timezone', 'UTC') | ||||
|  | ||||
|             if time_schedule_limit and time_schedule_limit.get('enabled'): | ||||
|                 try: | ||||
|                     is_in_schedule = is_within_schedule(time_schedule_limit=time_schedule_limit, | ||||
|                                                       default_tz=tz_name | ||||
|                                                       ) | ||||
|                 except Exception as e: | ||||
|                     logger.error( | ||||
|                         f"{uuid} - Recheck scheduler, error handling timezone, check skipped - TZ name '{tz_name}' - {str(e)}") | ||||
|                     return False | ||||
|  | ||||
|             ############################# | ||||
|             if not datastore.data['watching'][uuid].get('paused') and is_in_schedule: | ||||
|                 # Queue the watch for immediate recheck, with a higher priority | ||||
|                 update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|  | ||||
|             # Diff page [edit] link should go back to diff page | ||||
|             if request.args.get("next") and request.args.get("next") == 'diff': | ||||
|                 return redirect(url_for('ui.ui_views.diff_history_page', uuid=uuid)) | ||||
|  | ||||
|             return redirect(url_for('watchlist.index', tag=request.args.get("tag",''))) | ||||
|  | ||||
|         else: | ||||
|             if request.method == 'POST' and not form.validate(): | ||||
|                 flash("An error occurred, please see below.", "error") | ||||
|  | ||||
|             visualselector_data_is_ready = datastore.visualselector_data_is_ready(uuid) | ||||
|  | ||||
|  | ||||
|             # JQ is difficult to install on windows and must be manually added (outside requirements.txt) | ||||
|             jq_support = True | ||||
|             try: | ||||
|                 import jq | ||||
|             except ModuleNotFoundError: | ||||
|                 jq_support = False | ||||
|  | ||||
|             watch = datastore.data['watching'].get(uuid) | ||||
|  | ||||
|             system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver' | ||||
|  | ||||
|             watch_uses_webdriver = False | ||||
|             if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver' or watch.get('fetch_backend', '').startswith('extra_browser_'): | ||||
|                 watch_uses_webdriver = True | ||||
|  | ||||
|             from zoneinfo import available_timezones | ||||
|  | ||||
|             # Only works reliably with Playwright | ||||
|  | ||||
|             template_args = { | ||||
|                 'available_processors': processors.available_processors(), | ||||
|                 'available_timezones': sorted(available_timezones()), | ||||
|                 'browser_steps_config': browser_step_ui_config, | ||||
|                 'emailprefix': os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False), | ||||
|                 'extra_notification_token_placeholder_info': datastore.get_unique_notification_token_placeholders_available(), | ||||
|                 'extra_processor_config': form.extra_tab_content(), | ||||
|                 'extra_title': f" - Edit - {watch.label}", | ||||
|                 'form': form, | ||||
|                 'has_default_notification_urls': True if len(datastore.data['settings']['application']['notification_urls']) else False, | ||||
|                 'has_extra_headers_file': len(datastore.get_all_headers_in_textfile_for_watch(uuid=uuid)) > 0, | ||||
|                 'has_special_tag_options': _watch_has_tag_options_set(watch=watch), | ||||
|                 'watch_uses_webdriver': watch_uses_webdriver, | ||||
|                 'jq_support': jq_support, | ||||
|                 'playwright_enabled': os.getenv('PLAYWRIGHT_DRIVER_URL', False), | ||||
|                 'settings_application': datastore.data['settings']['application'], | ||||
|                 'timezone_default_config': datastore.data['settings']['application'].get('timezone'), | ||||
|                 'using_global_webdriver_wait': not default['webdriver_delay'], | ||||
|                 'uuid': uuid, | ||||
|                 'watch': watch | ||||
|             } | ||||
|  | ||||
|             included_content = None | ||||
|             if form.extra_form_content(): | ||||
|                 # So that the extra panels can access _helpers.html etc, we set the environment to load from templates/ | ||||
|                 # And then render the code from the module | ||||
|                 templates_dir = str(importlib.resources.files("changedetectionio").joinpath('templates')) | ||||
|                 env = Environment(loader=FileSystemLoader(templates_dir)) | ||||
|                 template = env.from_string(form.extra_form_content()) | ||||
|                 included_content = template.render(**template_args) | ||||
|  | ||||
|             output = render_template("edit.html", | ||||
|                                      extra_tab_content=form.extra_tab_content() if form.extra_tab_content() else None, | ||||
|                                      extra_form_content=included_content, | ||||
|                                      **template_args | ||||
|                                      ) | ||||
|  | ||||
|         return output | ||||
|  | ||||
|     @edit_blueprint.route("/edit/<string:uuid>/get-html", methods=['GET']) | ||||
|     @login_optionally_required | ||||
|     def watch_get_latest_html(uuid): | ||||
|         from io import BytesIO | ||||
|         from flask import send_file | ||||
|         import brotli | ||||
|  | ||||
|         watch = datastore.data['watching'].get(uuid) | ||||
|         if watch and watch.history.keys() and os.path.isdir(watch.watch_data_dir): | ||||
|             latest_filename = list(watch.history.keys())[-1] | ||||
|             html_fname = os.path.join(watch.watch_data_dir, f"{latest_filename}.html.br") | ||||
|             with open(html_fname, 'rb') as f: | ||||
|                 if html_fname.endswith('.br'): | ||||
|                     # Read and decompress the Brotli file | ||||
|                     decompressed_data = brotli.decompress(f.read()) | ||||
|                 else: | ||||
|                     decompressed_data = f.read() | ||||
|  | ||||
|             buffer = BytesIO(decompressed_data) | ||||
|  | ||||
|             return send_file(buffer, as_attachment=True, download_name=f"{latest_filename}.html", mimetype='text/html') | ||||
|  | ||||
|         # Return a 500 error | ||||
|         abort(500) | ||||
|  | ||||
|     # Ajax callback | ||||
|     @edit_blueprint.route("/edit/<string:uuid>/preview-rendered", methods=['POST']) | ||||
|     @login_optionally_required | ||||
|     def watch_get_preview_rendered(uuid): | ||||
|         '''For when viewing the "preview" of the rendered text from inside of Edit''' | ||||
|         from flask import jsonify | ||||
|         from changedetectionio.processors.text_json_diff import prepare_filter_prevew | ||||
|         result = prepare_filter_prevew(watch_uuid=uuid, form_data=request.form, datastore=datastore) | ||||
|         return jsonify(result) | ||||
|  | ||||
|     @edit_blueprint.route("/highlight_submit_ignore_url", methods=['POST']) | ||||
|     @login_optionally_required | ||||
|     def highlight_submit_ignore_url(): | ||||
|         import re | ||||
|         mode = request.form.get('mode') | ||||
|         selection = request.form.get('selection') | ||||
|  | ||||
|         uuid = request.args.get('uuid','') | ||||
|         if datastore.data["watching"].get(uuid): | ||||
|             if mode == 'exact': | ||||
|                 for l in selection.splitlines(): | ||||
|                     datastore.data["watching"][uuid]['ignore_text'].append(l.strip()) | ||||
|             elif mode == 'digit-regex': | ||||
|                 for l in selection.splitlines(): | ||||
|                     # Replace any series of numbers with a regex | ||||
|                     s = re.escape(l.strip()) | ||||
|                     s = re.sub(r'[0-9]+', r'\\d+', s) | ||||
|                     datastore.data["watching"][uuid]['ignore_text'].append('/' + s + '/') | ||||
|  | ||||
|         return f"<a href={url_for('ui.ui_views.preview_page', uuid=uuid)}>Click to preview</a>" | ||||
|      | ||||
|     return edit_blueprint | ||||
							
								
								
									
										106
									
								
								changedetectionio/blueprint/ui/notification.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,106 @@ | ||||
| from flask import Blueprint, request, make_response | ||||
| import random | ||||
| from loguru import logger | ||||
|  | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio.auth_decorator import login_optionally_required | ||||
| from changedetectionio.notification import process_notification | ||||
|  | ||||
def construct_blueprint(datastore: ChangeDetectionStore):
    """Build the 'ui_notification' blueprint serving the send-test-notification AJAX endpoint."""
    notification_blueprint = Blueprint('ui_notification', __name__, template_folder="../ui/templates")
    
    # AJAX endpoint for sending a test
    @notification_blueprint.route("/notification/send-test/<string:watch_uuid>", methods=['POST'])
    @notification_blueprint.route("/notification/send-test", methods=['POST'])
    @notification_blueprint.route("/notification/send-test/", methods=['POST'])
    @login_optionally_required
    def ajax_callback_send_notification_test(watch_uuid=None):
        """Send a test notification using URLs from the form, the watch's tags,
        or the global settings (in that order of preference).

        Returns plain 'OK ...' text on success, or an error message
        (HTTP 400 for hard failures).
        """
        # Watch_uuid could be unset in the case it`s used in tag editor, global settings
        import apprise
        from ...apprise_plugin.assets import apprise_asset
        from ...apprise_plugin.custom_handlers import apprise_http_custom_handler  # noqa: F401
        apobj = apprise.Apprise(asset=apprise_asset)

        is_global_settings_form = request.args.get('mode', '') == 'global-settings'
        is_group_settings_form = request.args.get('mode', '') == 'group-settings'

        # Use an existing random one on the global/main settings form
        if not watch_uuid and (is_global_settings_form or is_group_settings_form) \
                and datastore.data.get('watching'):
            logger.debug(f"Send test notification - Choosing random Watch {watch_uuid}")
            watch_uuid = random.choice(list(datastore.data['watching'].keys()))

        if not watch_uuid:
            return make_response("Error: You must have atleast one watch configured for 'test notification' to work", 400)

        # May be None, e.g. when the UUID belongs to a tag/group rather than a real watch
        watch = datastore.data['watching'].get(watch_uuid)

        notification_urls = None

        if request.form.get('notification_urls'):
            notification_urls = request.form['notification_urls'].strip().splitlines()

        if not notification_urls:
            logger.debug("Test notification - Trying by group/tag in the edit form if available")
            # On an edit page, we should also fire off to the tags if they have notifications
            if request.form.get('tags') and request.form['tags'].strip():
                # Accumulate across ALL tags - previously each iteration overwrote
                # the result so only the last tag's URLs were kept.
                urls_from_tags = []
                for k in request.form['tags'].split(','):
                    tag = datastore.tag_exists_by_name(k.strip())
                    if tag and tag.get('notifications_urls'):
                        urls_from_tags.extend(tag.get('notifications_urls'))
                notification_urls = urls_from_tags if urls_from_tags else None

        if not notification_urls and not is_global_settings_form and not is_group_settings_form:
            # In the global settings, use only what is typed currently in the text box
            logger.debug("Test notification - Trying by global system settings notifications")
            if datastore.data['settings']['application'].get('notification_urls'):
                notification_urls = datastore.data['settings']['application']['notification_urls']

        if not notification_urls:
            return 'Error: No Notification URLs set/found'

        for n_url in notification_urls:
            if len(n_url.strip()):
                if not apobj.add(n_url):
                    return f'Error:  {n_url} is not a valid AppRise URL.'

        try:
            # use the same as when it is triggered, but then override it with the form test values
            n_object = {
                'watch_url': request.form.get('window_url', "https://changedetection.io"),
                'notification_urls': notification_urls
            }

            # Only use if present, if not set in n_object it should use the default system value
            if 'notification_format' in request.form and request.form['notification_format'].strip():
                n_object['notification_format'] = request.form.get('notification_format', '').strip()

            if 'notification_title' in request.form and request.form['notification_title'].strip():
                n_object['notification_title'] = request.form.get('notification_title', '').strip()
            elif datastore.data['settings']['application'].get('notification_title'):
                n_object['notification_title'] = datastore.data['settings']['application'].get('notification_title')
            else:
                n_object['notification_title'] = "Test title"

            if 'notification_body' in request.form and request.form['notification_body'].strip():
                n_object['notification_body'] = request.form.get('notification_body', '').strip()
            elif datastore.data['settings']['application'].get('notification_body'):
                n_object['notification_body'] = datastore.data['settings']['application'].get('notification_body')
            else:
                n_object['notification_body'] = "Test body"

            n_object['as_async'] = False
            # Guard: watch is None when the supplied UUID wasn't a real watch;
            # previously this crashed with AttributeError instead of sending.
            if watch:
                n_object.update(watch.extra_notification_token_values())
            sent_obj = process_notification(n_object, datastore)

        except Exception as e:
            e_str = str(e)
            # Remove this text which is not important and floods the container
            e_str = e_str.replace(
                "DEBUG - <class 'apprise.decorators.base.CustomNotifyPlugin.instantiate_plugin.<locals>.CustomNotifyPluginWrapper'>",
                '')

            return make_response(e_str, 400)

        return 'OK - Sent test notifications'

    return notification_blueprint
| @@ -3,7 +3,7 @@ | ||||
|   <div class="box-wrap inner"> | ||||
|     <form | ||||
|       class="pure-form pure-form-stacked" | ||||
|       action="{{url_for('clear_all_history')}}" | ||||
|       action="{{url_for('ui.clear_all_history')}}" | ||||
|       method="POST" | ||||
|     > | ||||
|       <input type="hidden" name="csrf_token" value="{{ csrf_token() }}" > | ||||
| @@ -37,7 +37,7 @@ | ||||
|         </div> | ||||
|         <br /> | ||||
|         <div class="pure-control-group"> | ||||
|           <a href="{{url_for('index')}}" class="pure-button button-cancel" | ||||
|           <a href="{{url_for('watchlist.index')}}" class="pure-button button-cancel" | ||||
|             >Cancel</a | ||||
|           > | ||||
|         </div> | ||||
							
								
								
									
										220
									
								
								changedetectionio/blueprint/ui/views.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,220 @@ | ||||
| from flask import Blueprint, request, redirect, url_for, flash, render_template, make_response, send_from_directory, abort | ||||
| from flask_login import current_user | ||||
| import os | ||||
| import time | ||||
| from copy import deepcopy | ||||
|  | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio.auth_decorator import login_optionally_required | ||||
| from changedetectionio import html_tools | ||||
|  | ||||
def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMetaData):
    """Build the 'ui_views' blueprint: snapshot preview, diff viewer and quick-add form.

    :param datastore: application-wide ChangeDetectionStore
    :param update_q: priority queue feeding the update/fetch workers
    :param queuedWatchMetaData: provides PrioritizedItem for queue entries
    """
    views_blueprint = Blueprint('ui_views', __name__, template_folder="../ui/templates")
    
    @views_blueprint.route("/preview/<string:uuid>", methods=['GET'])
    @login_optionally_required
    def preview_page(uuid):
        """Render the stored text snapshot of a watch, optionally a specific version."""
        content = []
        versions = []
        timestamp = None

        # More for testing, possible to return the first/only
        if uuid == 'first':
            uuid = list(datastore.data['watching'].keys()).pop()

        try:
            watch = datastore.data['watching'][uuid]
        except KeyError:
            flash("No history found for the specified link, bad link?", "error")
            return redirect(url_for('watchlist.index'))

        system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'
        extra_stylesheets = [url_for('static_content', group='styles', filename='diff.css')]

        # Webdriver applies if selected directly, inherited from the system
        # default, or via an 'extra browser' endpoint
        is_html_webdriver = False
        if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver' or watch.get('fetch_backend', '').startswith('extra_browser_'):
            is_html_webdriver = True

        triggered_line_numbers = []
        if datastore.data['watching'][uuid].history_n == 0 and (watch.get_error_text() or watch.get_error_snapshot()):
            flash("Preview unavailable - No fetch/check completed or triggers not reached", "error")
        else:
            # So prepare the latest preview or not
            preferred_version = request.args.get('version')
            versions = list(watch.history.keys())
            timestamp = versions[-1]
            if preferred_version and preferred_version in versions:
                timestamp = preferred_version

            try:
                content = watch.get_history_snapshot(timestamp)
                # Line numbers that would trip this watch's trigger_text, for highlighting
                triggered_line_numbers = html_tools.strip_ignore_text(content=content,
                                                                      wordlist=watch['trigger_text'],
                                                                      mode='line numbers'
                                                                      )
            except Exception as e:
                # Assign (not append) - 'content' may already be a str snapshot
                # if strip_ignore_text was what raised
                content = [{'line': f"File doesnt exist or unable to read timestamp {timestamp}", 'classes': ''}]

        output = render_template("preview.html",
                                 content=content,
                                 current_version=timestamp,
                                 history_n=watch.history_n,
                                 extra_stylesheets=extra_stylesheets,
                                 extra_title=f" - Diff - {watch.label} @ {timestamp}",
                                 triggered_line_numbers=triggered_line_numbers,
                                 current_diff_url=watch['url'],
                                 screenshot=watch.get_screenshot(),
                                 watch=watch,
                                 uuid=uuid,
                                 is_html_webdriver=is_html_webdriver,
                                 last_error=watch['last_error'],
                                 last_error_text=watch.get_error_text(),
                                 last_error_screenshot=watch.get_error_snapshot(),
                                 versions=versions
                                )

        return output

    @views_blueprint.route("/diff/<string:uuid>", methods=['GET', 'POST'])
    @login_optionally_required
    def diff_history_page(uuid):
        """Show the diff between two stored versions; POST runs a regex extract over all history."""
        from changedetectionio import forms

        # More for testing, possible to return the first/only
        if uuid == 'first':
            uuid = list(datastore.data['watching'].keys()).pop()

        extra_stylesheets = [url_for('static_content', group='styles', filename='diff.css')]
        try:
            watch = datastore.data['watching'][uuid]
        except KeyError:
            flash("No history found for the specified link, bad link?", "error")
            return redirect(url_for('watchlist.index'))

        # For submission of requesting an extract
        extract_form = forms.extractDataForm(request.form)
        if request.method == 'POST':
            if not extract_form.validate():
                flash("An error occurred, please see below.", "error")

            else:
                extract_regex = request.form.get('extract_regex').strip()
                output = watch.extract_regex_from_all_history(extract_regex)
                if output:
                    watch_dir = os.path.join(datastore.datastore_path, uuid)
                    response = make_response(send_from_directory(directory=watch_dir, path=output, as_attachment=True))
                    response.headers['Content-type'] = 'text/csv'
                    response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
                    response.headers['Pragma'] = 'no-cache'
                    response.headers['Expires'] = 0
                    return response

                flash('Nothing matches that RegEx', 'error')
                # 'return' was missing here so the redirect was silently discarded;
                # relative endpoint ('.') resolves correctly however the blueprint is nested
                return redirect(url_for('.diff_history_page', uuid=uuid)+'#extract')

        history = watch.history
        dates = list(history.keys())

        if len(dates) < 2:
            flash("Not enough saved change detection snapshots to produce a report.", "error")
            return redirect(url_for('watchlist.index'))

        # Save the current newest history as the most recently viewed
        datastore.set_last_viewed(uuid, time.time())

        # Read as binary and force decode as UTF-8
        # Windows may fail decode in python if we just use 'r' mode (chardet decode exception)
        from_version = request.args.get('from_version')
        from_version_index = -2  # second newest
        if from_version and from_version in dates:
            from_version_index = dates.index(from_version)
        else:
            from_version = dates[from_version_index]

        try:
            from_version_file_contents = watch.get_history_snapshot(dates[from_version_index])
        except Exception as e:
            from_version_file_contents = f"Unable to read from-version at index {dates[from_version_index]}.\n"

        to_version = request.args.get('to_version')
        to_version_index = -1
        if to_version and to_version in dates:
            to_version_index = dates.index(to_version)
        else:
            to_version = dates[to_version_index]

        try:
            to_version_file_contents = watch.get_history_snapshot(dates[to_version_index])
        except Exception as e:
            to_version_file_contents = "Unable to read to-version at index{}.\n".format(dates[to_version_index])

        screenshot_url = watch.get_screenshot()

        system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'

        is_html_webdriver = False
        if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver' or watch.get('fetch_backend', '').startswith('extra_browser_'):
            is_html_webdriver = True

        # Sharing the diff publicly is only allowed when a password is set AND sharing enabled
        password_enabled_and_share_is_off = False
        if datastore.data['settings']['application'].get('password') or os.getenv("SALTED_PASS", False):
            password_enabled_and_share_is_off = not datastore.data['settings']['application'].get('shared_diff_access')

        output = render_template("diff.html",
                                 current_diff_url=watch['url'],
                                 from_version=str(from_version),
                                 to_version=str(to_version),
                                 extra_stylesheets=extra_stylesheets,
                                 extra_title=f" - Diff - {watch.label}",
                                 extract_form=extract_form,
                                 is_html_webdriver=is_html_webdriver,
                                 last_error=watch['last_error'],
                                 last_error_screenshot=watch.get_error_snapshot(),
                                 last_error_text=watch.get_error_text(),
                                 left_sticky=True,
                                 newest=to_version_file_contents,
                                 newest_version_timestamp=dates[-1],
                                 password_enabled_and_share_is_off=password_enabled_and_share_is_off,
                                 from_version_file_contents=from_version_file_contents,
                                 to_version_file_contents=to_version_file_contents,
                                 screenshot=screenshot_url,
                                 uuid=uuid,
                                 versions=dates, # All except current/last
                                 watch_a=watch
                                 )

        return output

    @views_blueprint.route("/form/add/quickwatch", methods=['POST'])
    @login_optionally_required
    def form_quick_watch_add():
        """Add a watch from the quick-add form; queue it, or open the editor paused."""
        from changedetectionio import forms
        form = forms.quickWatchForm(request.form)

        if not form.validate():
            for widget, l in form.errors.items():
                flash(','.join(l), 'error')
            return redirect(url_for('watchlist.index'))

        url = request.form.get('url').strip()
        if datastore.url_exists(url):
            flash(f'Warning, URL {url} already exists', "notice")

        # 'Edit > Watch' button present means the user wants to configure before the first check
        add_paused = request.form.get('edit_and_watch_submit_button') is not None
        processor = request.form.get('processor', 'text_json_diff')
        new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras={'paused': add_paused, 'processor': processor})

        if new_uuid:
            if add_paused:
                flash('Watch added in Paused state, saving will unpause.')
                return redirect(url_for('ui.ui_edit.edit_page', uuid=new_uuid, unpause_on_save=1, tag=request.args.get('tag')))
            else:
                # Straight into the queue.
                update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid}))
                flash("Watch added.")

        return redirect(url_for('watchlist.index', tag=request.args.get('tag','')))

    return views_blueprint
							
								
								
									
										111
									
								
								changedetectionio/blueprint/watchlist/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,111 @@ | ||||
| import os | ||||
| import time | ||||
|  | ||||
| from flask import Blueprint, request, make_response, render_template, redirect, url_for, flash, session | ||||
| from flask_login import current_user | ||||
| from flask_paginate import Pagination, get_page_parameter | ||||
|  | ||||
| from changedetectionio import forms | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio.auth_decorator import login_optionally_required | ||||
|  | ||||
def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMetaData):
    """Build the 'watchlist' blueprint which serves the main watch-overview page.

    :param datastore: application ChangeDetectionStore (watch configs + settings)
    :param update_q: priority queue of watches waiting to be rechecked
    :param queuedWatchMetaData: module providing PrioritizedItem; kept for
        interface parity with the other blueprint constructors
    """
    watchlist_blueprint = Blueprint('watchlist', __name__, template_folder="templates")

    @watchlist_blueprint.route("/", methods=['GET'])
    @login_optionally_required
    def index():
        active_tag_req = request.args.get('tag', '').lower().strip()
        active_tag_uuid = active_tag = None

        # Resolve the requested tag (given either by title or by uuid) to its uuid/config.
        if active_tag_req:
            for uuid, tag in datastore.data['settings']['application'].get('tags', {}).items():
                if active_tag_req == tag.get('title', '').lower().strip() or active_tag_req == uuid:
                    active_tag = tag
                    active_tag_uuid = uuid
                    break

        # Redirect for the old rss path which used the /?rss=true
        if request.args.get('rss'):
            return redirect(url_for('rss.feed', tag=active_tag_uuid))

        # Quick per-watch operations (pause/mute toggles) arrive as ?op=...&uuid=...
        op = request.args.get('op')
        if op:
            uuid = request.args.get('uuid')
            # Guard against stale/forged uuids so a bad link redirects instead of raising KeyError.
            if uuid and uuid in datastore.data['watching']:
                if op == 'pause':
                    datastore.data['watching'][uuid].toggle_pause()
                elif op == 'mute':
                    datastore.data['watching'][uuid].toggle_mute()
                datastore.needs_write = True
            return redirect(url_for('watchlist.index', tag=active_tag_uuid))

        # Build the (filtered) list of watches to show; the template does the actual sorting.
        sorted_watches = []
        with_errors = request.args.get('with_errors') == "1"
        errored_count = 0
        search_q = request.args.get('q', '').strip().lower()
        for uuid, watch in datastore.data['watching'].items():
            if with_errors and not watch.get('last_error'):
                continue

            if active_tag_uuid and active_tag_uuid not in watch['tags']:
                continue
            if watch.get('last_error'):
                errored_count += 1

            if search_q:
                # Free-text search matches title, URL or the last error message.
                if (watch.get('title') and search_q in watch.get('title').lower()) or search_q in watch.get('url', '').lower():
                    sorted_watches.append(watch)
                elif watch.get('last_error') and search_q in watch.get('last_error').lower():
                    sorted_watches.append(watch)
            else:
                sorted_watches.append(watch)

        form = forms.quickWatchForm(request.form)
        page = request.args.get(get_page_parameter(), type=int, default=1)
        total_count = len(sorted_watches)

        pagination = Pagination(page=page,
                                total=total_count,
                                per_page=datastore.data['settings']['application'].get('pager_size', 50),
                                css_framework="semantic")

        # Default to {} so a missing 'tags' key cannot raise AttributeError on .items()
        sorted_tags = sorted(datastore.data['settings']['application'].get('tags', {}).items(),
                             key=lambda x: x[1]['title'])
        output = render_template(
            "watch-overview.html",
            active_tag=active_tag,
            active_tag_uuid=active_tag_uuid,
            app_rss_token=datastore.data['settings']['application'].get('rss_access_token'),
            datastore=datastore,
            errored_count=errored_count,
            form=form,
            guid=datastore.data['app_guid'],
            has_proxies=datastore.proxy_list,
            has_unviewed=datastore.has_unviewed,
            hosted_sticky=os.getenv("SALTED_PASS", False) == False,
            now_time_server=time.time(),
            pagination=pagination,
            queued_uuids=[q_uuid.item['uuid'] for q_uuid in update_q.queue],
            search_q=request.args.get('q', '').strip(),
            sort_attribute=request.args.get('sort') if request.args.get('sort') else request.cookies.get('sort'),
            sort_order=request.args.get('order') if request.args.get('order') else request.cookies.get('order'),
            system_default_fetcher=datastore.data['settings']['application'].get('fetch_backend'),
            tags=sorted_tags,
            watches=sorted_watches
        )

        # One-shot session flag; clear it once the page has been rendered.
        session.pop('share-link', None)

        resp = make_response(output)

        # The template can run on cookie or url query info
        if request.args.get('sort'):
            resp.set_cookie('sort', request.args.get('sort'))
        if request.args.get('order'):
            resp.set_cookie('order', request.args.get('order'))

        return resp

    return watchlist_blueprint
| @@ -3,10 +3,19 @@ | ||||
| {% from '_helpers.html' import render_simple_field, render_field, render_nolabel_field, sort_by_title %} | ||||
| <script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script> | ||||
| <script>let nowtimeserver={{ now_time_server }};</script> | ||||
| 
 | ||||
| <style> | ||||
| .checking-now .last-checked { | ||||
|     background-image: linear-gradient(to bottom, transparent 0%, rgba(0,0,0,0.05) 40%, rgba(0,0,0,0.1) 100%); | ||||
|     background-size: 0 100%; | ||||
|     background-repeat: no-repeat; | ||||
|     transition: background-size 0.9s ease | ||||
| } | ||||
| </style> | ||||
| <div class="box"> | ||||
| 
 | ||||
|     <form class="pure-form" action="{{ url_for('form_quick_watch_add') }}" method="POST" id="new-watch-form"> | ||||
|     <form class="pure-form" action="{{ url_for('ui.ui_views.form_quick_watch_add', tag=active_tag_uuid) }}" method="POST" id="new-watch-form"> | ||||
|         <input type="hidden" name="csrf_token" value="{{ csrf_token() }}" > | ||||
|         <fieldset> | ||||
|             <legend>Add a new change detection watch</legend> | ||||
| @@ -25,7 +34,7 @@ | ||||
|         <span style="color:#eee; font-size: 80%;"><img alt="Create a shareable link" style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread-white.svg')}}" > Tip: You can also add 'shared' watches. <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Sharing-a-Watch">More info</a></span> | ||||
|     </form> | ||||
| 
 | ||||
|     <form class="pure-form" action="{{ url_for('form_watch_list_checkbox_operations') }}" method="POST" id="watch-list-form"> | ||||
|     <form class="pure-form" action="{{ url_for('ui.form_watch_list_checkbox_operations') }}" method="POST" id="watch-list-form"> | ||||
|     <input type="hidden" name="csrf_token" value="{{ csrf_token() }}" > | ||||
|     <input type="hidden" id="op_extradata" name="op_extradata" value="" > | ||||
|     <div id="checkbox-operations"> | ||||
| @@ -46,12 +55,12 @@ | ||||
|     {% endif %} | ||||
|     {% if search_q %}<div id="search-result-info">Searching "<strong><i>{{search_q}}</i></strong>"</div>{% endif %} | ||||
|     <div> | ||||
|         <a href="{{url_for('index')}}" class="pure-button button-tag {{'active' if not active_tag_uuid }}">All</a> | ||||
|         <a href="{{url_for('watchlist.index')}}" class="pure-button button-tag {{'active' if not active_tag_uuid }}">All</a> | ||||
| 
 | ||||
|     <!-- tag list --> | ||||
|     {% for uuid, tag in tags %} | ||||
|         {% if tag != "" %} | ||||
|             <a href="{{url_for('index', tag=uuid) }}" class="pure-button button-tag {{'active' if active_tag_uuid == uuid }}">{{ tag.title }}</a> | ||||
|             <a href="{{url_for('watchlist.index', tag=uuid) }}" class="pure-button button-tag {{'active' if active_tag_uuid == uuid }}">{{ tag.title }}</a> | ||||
|         {% endif %} | ||||
|     {% endfor %} | ||||
|     </div> | ||||
| @@ -72,27 +81,27 @@ | ||||
|             <tr> | ||||
|                 {% set link_order = "desc" if sort_order  == 'asc' else "asc" %} | ||||
|                 {% set arrow_span = "" %} | ||||
|                 <th><input style="vertical-align: middle" type="checkbox" id="check-all" > <a class="{{ 'active '+link_order if sort_attribute == 'date_created' else 'inactive' }}"  href="{{url_for('index', sort='date_created', order=link_order, tag=active_tag_uuid)}}"># <span class='arrow {{link_order}}'></span></a></th> | ||||
|                 <th><input style="vertical-align: middle" type="checkbox" id="check-all" > <a class="{{ 'active '+link_order if sort_attribute == 'date_created' else 'inactive' }}"  href="{{url_for('watchlist.index', sort='date_created', order=link_order, tag=active_tag_uuid)}}"># <span class='arrow {{link_order}}'></span></a></th> | ||||
|                 <th class="empty-cell"></th> | ||||
|                 <th><a class="{{ 'active '+link_order if sort_attribute == 'label' else 'inactive' }}" href="{{url_for('index', sort='label', order=link_order, tag=active_tag_uuid)}}">Website <span class='arrow {{link_order}}'></span></a></th> | ||||
|                 <th><a class="{{ 'active '+link_order if sort_attribute == 'label' else 'inactive' }}" href="{{url_for('watchlist.index', sort='label', order=link_order, tag=active_tag_uuid)}}">Website <span class='arrow {{link_order}}'></span></a></th> | ||||
|              {% if any_has_restock_price_processor %} | ||||
|                 <th>Restock & Price</th> | ||||
|              {% endif %} | ||||
|                 <th><a class="{{ 'active '+link_order if sort_attribute == 'last_checked' else 'inactive' }}" href="{{url_for('index', sort='last_checked', order=link_order, tag=active_tag_uuid)}}">Last Checked <span class='arrow {{link_order}}'></span></a></th> | ||||
|                 <th><a class="{{ 'active '+link_order if sort_attribute == 'last_changed' else 'inactive' }}" href="{{url_for('index', sort='last_changed', order=link_order, tag=active_tag_uuid)}}">Last Changed <span class='arrow {{link_order}}'></span></a></th> | ||||
|                 <th><a class="{{ 'active '+link_order if sort_attribute == 'last_checked' else 'inactive' }}" href="{{url_for('watchlist.index', sort='last_checked', order=link_order, tag=active_tag_uuid)}}"><span class="hide-on-mobile">Last</span> Checked <span class='arrow {{link_order}}'></span></a></th> | ||||
|                 <th><a class="{{ 'active '+link_order if sort_attribute == 'last_changed' else 'inactive' }}" href="{{url_for('watchlist.index', sort='last_changed', order=link_order, tag=active_tag_uuid)}}"><span class="hide-on-mobile">Last</span> Changed <span class='arrow {{link_order}}'></span></a></th> | ||||
|                 <th class="empty-cell"></th> | ||||
|             </tr> | ||||
|             </thead> | ||||
|             <tbody> | ||||
|             {% if not watches|length %} | ||||
|             <tr> | ||||
|                 <td colspan="{{ cols_required }}" style="text-wrap: wrap;">No website watches configured, please add a URL in the box above, or <a href="{{ url_for('import_page')}}" >import a list</a>.</td> | ||||
|                 <td colspan="{{ cols_required }}" style="text-wrap: wrap;">No website watches configured, please add a URL in the box above, or <a href="{{ url_for('imports.import_page')}}" >import a list</a>.</td> | ||||
|             </tr> | ||||
|             {% endif %} | ||||
|             {% for watch in (watches|sort(attribute=sort_attribute, reverse=sort_order == 'asc'))|pagination_slice(skip=pagination.skip) %} | ||||
| 
 | ||||
|                 {% set is_unviewed =  watch.newest_history_key| int > watch.last_viewed and watch.history_n>=2 %} | ||||
| 
 | ||||
|                 {% set is_unviewed = watch.newest_history_key| int > watch.last_viewed and watch.history_n>=2 %} | ||||
|                 {% set checking_now = is_checking_now(watch) %} | ||||
|             <tr id="{{ watch.uuid }}" | ||||
|                 class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }} processor-{{ watch['processor'] }} | ||||
|                 {% if watch.last_error is defined and watch.last_error != False %}error{% endif %} | ||||
| @@ -100,25 +109,28 @@ | ||||
|                 {% if watch.paused is defined and watch.paused != False %}paused{% endif %} | ||||
|                 {% if is_unviewed %}unviewed{% endif %} | ||||
|                 {% if watch.has_restock_info %} has-restock-info {% if watch['restock']['in_stock'] %}in-stock{% else %}not-in-stock{% endif %} {% else %}no-restock-info{% endif %} | ||||
|                 {% if watch.uuid in queued_uuids %}queued{% endif %}"> | ||||
|                 {% if watch.uuid in queued_uuids %}queued{% endif %} | ||||
|                 {% if checking_now %}checking-now{% endif %} | ||||
|                 "> | ||||
|                 <td class="inline checkbox-uuid" ><input name="uuids"  type="checkbox" value="{{ watch.uuid}} " > <span>{{ loop.index+pagination.skip }}</span></td> | ||||
|                 <td class="inline watch-controls"> | ||||
|                     {% if not watch.paused %} | ||||
|                     <a class="state-off" href="{{url_for('index', op='pause', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause checks" title="Pause checks" class="icon icon-pause" ></a> | ||||
|                     <a class="state-off" href="{{url_for('watchlist.index', op='pause', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause checks" title="Pause checks" class="icon icon-pause" ></a> | ||||
|                     {% else %} | ||||
|                     <a class="state-on" href="{{url_for('index', op='pause', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='play.svg')}}" alt="UnPause checks" title="UnPause checks" class="icon icon-unpause" ></a> | ||||
|                     <a class="state-on" href="{{url_for('watchlist.index', op='pause', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='play.svg')}}" alt="UnPause checks" title="UnPause checks" class="icon icon-unpause" ></a> | ||||
|                     {% endif %} | ||||
|                     <a class="link-mute state-{{'on' if watch.notification_muted else 'off'}}" href="{{url_for('index', op='mute', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications" class="icon icon-mute" ></a> | ||||
|                     {% set mute_label = 'UnMute notification' if watch.notification_muted else 'Mute notification' %} | ||||
|                     <a class="link-mute state-{{'on' if watch.notification_muted else 'off'}}" href="{{url_for('watchlist.index', op='mute', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="{{ mute_label }}" title="{{ mute_label }}" class="icon icon-mute" ></a> | ||||
|                 </td> | ||||
|                 <td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}} | ||||
|                     <a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}"></a> | ||||
|                     <a class="link-spread" href="{{url_for('form_share_put_watch', uuid=watch.uuid)}}"><img src="{{url_for('static_content', group='images', filename='spread.svg')}}" class="status-icon icon icon-spread" title="Create a link to share watch config with others" ></a> | ||||
|                     <a class="link-spread" href="{{url_for('ui.form_share_put_watch', uuid=watch.uuid)}}"><img src="{{url_for('static_content', group='images', filename='spread.svg')}}" class="status-icon icon icon-spread" title="Create a link to share watch config with others" ></a> | ||||
| 
 | ||||
|                     {% if watch.get_fetch_backend == "html_webdriver" | ||||
|                          or (  watch.get_fetch_backend == "system" and system_default_fetcher == 'html_webdriver'  ) | ||||
|                          or "extra_browser_" in watch.get_fetch_backend | ||||
|                     %} | ||||
|                     <img class="status-icon" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" title="Using a Chrome browser" > | ||||
|                     <img class="status-icon" src="{{url_for('static_content', group='images', filename='google-chrome-icon.png')}}" alt="Using a Chrome browser" title="Using a Chrome browser" > | ||||
|                     {% endif %} | ||||
| 
 | ||||
|                     {%if watch.is_pdf  %}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" title="Converting PDF to text" >{% endif %} | ||||
| @@ -128,9 +140,9 @@ | ||||
| 
 | ||||
|                         {% if '403' in watch.last_error %} | ||||
|                             {% if has_proxies %} | ||||
|                                 <a href="{{ url_for('settings_page', uuid=watch.uuid) }}#proxies">Try other proxies/location</a>  | ||||
|                                 <a href="{{ url_for('settings.settings_page', uuid=watch.uuid) }}#proxies">Try other proxies/location</a>  | ||||
|                             {% endif %} | ||||
|                             <a href="{{ url_for('settings_page', uuid=watch.uuid) }}#proxies">Try adding external proxies/locations</a> | ||||
|                             <a href="{{ url_for('settings.settings_page', uuid=watch.uuid) }}#proxies">Try adding external proxies/locations</a> | ||||
|                          | ||||
|                         {% endif %} | ||||
|                         {% if 'empty result or contain only an image' in watch.last_error %} | ||||
| @@ -139,7 +151,7 @@ | ||||
|                     </div> | ||||
|                     {% endif %} | ||||
|                     {% if watch.last_notification_error is defined and watch.last_notification_error != False %} | ||||
|                     <div class="fetch-error notification-error"><a href="{{url_for('notification_logs')}}">{{ watch.last_notification_error }}</a></div> | ||||
|                     <div class="fetch-error notification-error"><a href="{{url_for('settings.notification_logs')}}">{{ watch.last_notification_error }}</a></div> | ||||
|                     {% endif %} | ||||
| 
 | ||||
|                     {% if watch['processor'] == 'text_json_diff'  %} | ||||
| @@ -177,7 +189,14 @@ | ||||
|                     {% endif %} | ||||
|                 </td> | ||||
| {% endif %} | ||||
|                 <td class="last-checked" data-timestamp="{{ watch.last_checked }}">{{watch|format_last_checked_time|safe}}</td> | ||||
|             {#last_checked becomes fetch-start-time#} | ||||
|                 <td class="last-checked" data-timestamp="{{ watch.last_checked }}" {% if checking_now %} data-fetchduration={{ watch.fetch_time }} data-eta_complete="{{ watch.last_checked+watch.fetch_time }}" {% endif %} > | ||||
|                     {% if checking_now %} | ||||
|                         <span class="spinner"></span><span> Checking now</span> | ||||
|                     {% else %} | ||||
|                         {{watch|format_last_checked_time|safe}}</td> | ||||
|                     {% endif %} | ||||
| 
 | ||||
|                 <td class="last-changed" data-timestamp="{{ watch.last_changed }}">{% if watch.history_n >=2 and watch.last_changed >0 %} | ||||
|                     {{watch.last_changed|format_timestamp_timeago}} | ||||
|                     {% else %} | ||||
| @@ -185,20 +204,20 @@ | ||||
|                     {% endif %} | ||||
|                 </td> | ||||
|                 <td> | ||||
|                     <a {% if watch.uuid in queued_uuids %}disabled="true"{% endif %} href="{{ url_for('form_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}" | ||||
|                     <a {% if watch.uuid in queued_uuids %}disabled="true"{% endif %} href="{{ url_for('ui.form_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}" | ||||
|                        class="recheck pure-button pure-button-primary">{% if watch.uuid in queued_uuids %}Queued{% else %}Recheck{% endif %}</a> | ||||
|                     <a href="{{ url_for('edit_page', uuid=watch.uuid)}}#general" class="pure-button pure-button-primary">Edit</a> | ||||
|                     <a href="{{ url_for('ui.ui_edit.edit_page', uuid=watch.uuid, tag=active_tag_uuid)}}#general" class="pure-button pure-button-primary">Edit</a> | ||||
|                     {% if watch.history_n >= 2 %} | ||||
| 
 | ||||
|                         {%  if is_unviewed %} | ||||
|                            <a href="{{ url_for('diff_history_page', uuid=watch.uuid, from_version=watch.get_next_snapshot_key_to_last_viewed) }}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a> | ||||
|                            <a href="{{ url_for('ui.ui_views.diff_history_page', uuid=watch.uuid, from_version=watch.get_from_version_based_on_last_viewed) }}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">History</a> | ||||
|                         {% else %} | ||||
|                            <a href="{{ url_for('diff_history_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a> | ||||
|                            <a href="{{ url_for('ui.ui_views.diff_history_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">History</a> | ||||
|                         {% endif %} | ||||
| 
 | ||||
|                     {% else %} | ||||
|                         {% if watch.history_n == 1 or (watch.history_n ==0 and watch.error_text_ctime )%} | ||||
|                             <a href="{{ url_for('preview_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary">Preview</a> | ||||
|                             <a href="{{ url_for('ui.ui_views.preview_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary">Preview</a> | ||||
|                         {% endif %} | ||||
|                     {% endif %} | ||||
|                 </td> | ||||
| @@ -209,20 +228,20 @@ | ||||
|         <ul id="post-list-buttons"> | ||||
|             {% if errored_count %} | ||||
|             <li> | ||||
|                 <a href="{{url_for('index', with_errors=1, tag=request.args.get('tag')) }}" class="pure-button button-tag button-error ">With errors ({{ errored_count }})</a> | ||||
|                 <a href="{{url_for('watchlist.index', with_errors=1, tag=request.args.get('tag')) }}" class="pure-button button-tag button-error ">With errors ({{ errored_count }})</a> | ||||
|             </li> | ||||
|             {% endif %} | ||||
|             {% if has_unviewed %} | ||||
|             <li> | ||||
|                 <a href="{{url_for('mark_all_viewed',with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Mark all viewed</a> | ||||
|                 <a href="{{url_for('ui.mark_all_viewed',with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Mark all viewed</a> | ||||
|             </li> | ||||
|             {% endif %} | ||||
|             <li> | ||||
|                <a href="{{ url_for('form_watch_checknow', tag=active_tag_uuid, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Recheck | ||||
|                <a href="{{ url_for('ui.form_watch_checknow', tag=active_tag_uuid, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Recheck | ||||
|                 all {% if active_tag_uuid %} in "{{active_tag.title}}"{%endif%}</a> | ||||
|             </li> | ||||
|             <li> | ||||
|                 <a href="{{ url_for('rss', tag=active_tag_uuid, token=app_rss_token)}}"><img alt="RSS Feed" id="feed-icon" src="{{url_for('static_content', group='images', filename='Generic_Feed-icon.svg')}}" height="15"></a> | ||||
|                 <a href="{{ url_for('rss.feed', tag=active_tag_uuid, token=app_rss_token)}}"><img alt="RSS Feed" id="feed-icon" src="{{url_for('static_content', group='images', filename='generic_feed-icon.svg')}}" height="15"></a> | ||||
|             </li> | ||||
|         </ul> | ||||
|         {{ pagination.links }} | ||||
							
								
								
									
										134
									
								
								changedetectionio/conditions/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,134 @@ | ||||
| from flask import Blueprint | ||||
|  | ||||
| from json_logic.builtins import BUILTINS | ||||
|  | ||||
| from .exceptions import EmptyConditionRuleRowNotUsable | ||||
| from .pluggy_interface import plugin_manager  # Import the pluggy plugin manager | ||||
| from . import default_plugin | ||||
|  | ||||
# List of all supported JSON Logic operators
# (extended at import time by plugins via register_operator_choices() — see the
# plugin-loading loop at the bottom of this module)
operator_choices = [
    (None, "Choose one - Operator"),
    (">", "Greater Than"),
    ("<", "Less Than"),
    (">=", "Greater Than or Equal To"),
    ("<=", "Less Than or Equal To"),
    ("==", "Equals"),
    ("!=", "Not Equals"),
    ("in", "Contains"),
    ("!in", "Does Not Contain"),
]

# Fields available in the rules
# (populated at import time by plugins via register_field_choices())
field_choices = [
    (None, "Choose one - Field"),
]

# The data we will feed the JSON Rules to see if it passes the test/conditions or not
# NOTE(review): this module-level dict appears unused — execute_ruleset_against_all_plugins()
# creates its own local EXECUTE_DATA which shadows this name; confirm no external
# importer relies on it before removing.
EXECUTE_DATA = {}


# Define the extended operations dictionary
# Maps operator name -> callable; passed as `operations=` to jsonLogic().
# Plugins may add entries via register_operators() at import time.
CUSTOM_OPERATIONS = {
    **BUILTINS,  # Include all standard operators
}
|  | ||||
def filter_complete_rules(ruleset):
    """Return only the rows of *ruleset* whose operator, field and value are all usable.

    A row is dropped when any of its three parts is one of the "empty" markers:
    "", False, the literal string "None", or None.
    """
    unusable = ("", False, "None", None)

    def row_is_complete(rule):
        parts = (rule["operator"], rule["field"], rule["value"])
        return not any(part in unusable for part in parts)

    return [rule for rule in ruleset if row_is_complete(rule)]
|  | ||||
def convert_to_jsonlogic(logic_operator: str, rule_dict: list):
    """
    Convert a structured rule list into a JSON Logic rule.

    :param logic_operator: "and" / "or" — how multiple conditions are combined.
    :param rule_dict: List of condition dicts, each with "operator", "field" and "value".
    :return: JSON Logic rule as a dictionary (unwrapped when there is only one condition).
    :raises EmptyConditionRuleRowNotUsable: when a row is missing operator, field or value.
    """
    json_logic_conditions = []

    for condition in rule_dict:
        operator = condition["operator"]
        field = condition["field"]
        value = condition["value"]

        if not operator or operator == 'None' or not value or not field:
            raise EmptyConditionRuleRowNotUsable()

        # Convert value to int/float if possible (form values arrive as strings)
        try:
            # BUGFIX: was `str != "None"` — comparing the *type* `str` to a string,
            # which is always True; the intent is to leave the literal "None" untouched.
            if isinstance(value, str) and "." in value and value != "None":
                value = float(value)
            else:
                value = int(value)
        except (ValueError, TypeError):
            pass  # Keep as a string if conversion fails

        # Handle different JSON Logic operators properly
        if operator == "in":
            json_logic_conditions.append({"in": [value, {"var": field}]})  # value first
        elif operator in ("!", "!!", "-"):
            json_logic_conditions.append({operator: [{"var": field}]})  # Unary operators
        elif operator in ("min", "max", "cat"):
            json_logic_conditions.append({operator: value})  # Multi-argument operators
        else:
            json_logic_conditions.append({operator: [{"var": field}, value]})  # Standard binary operators

    return {logic_operator: json_logic_conditions} if len(json_logic_conditions) > 1 else json_logic_conditions[0]
|  | ||||
|  | ||||
def execute_ruleset_against_all_plugins(current_watch_uuid: str, application_datastruct, ephemeral_data=None):
    """
    Build our data and options by calling our plugins then pass it to jsonlogic and see if the conditions pass

    :param current_watch_uuid: UUID of the watch whose conditions should be evaluated.
    :param application_datastruct: The application data structure containing the 'watching' dict.
    :param ephemeral_data: Optional extra data (e.g. just-fetched content) handed to each plugin.
    :return: dict with 'executed_data' (the facts fed to jsonLogic) and 'result' (bool pass/fail).
    """
    from json_logic import jsonLogic

    # Default changed from a shared `{}` to None to avoid the mutable-default-argument
    # pitfall: this dict is handed to every plugin, which could mutate it across calls.
    if ephemeral_data is None:
        ephemeral_data = {}

    # Local accumulator (the module-level EXECUTE_DATA is intentionally not used here).
    execute_data = {}
    result = True

    ruleset_settings = application_datastruct['watching'].get(current_watch_uuid)

    # Guard: an unknown uuid returns None; treat it as "no conditions" rather than raising.
    if ruleset_settings and ruleset_settings.get("conditions"):
        logic_operator = "and" if ruleset_settings.get("conditions_match_logic", "ALL") == "ALL" else "or"
        complete_rules = filter_complete_rules(ruleset_settings['conditions'])
        if complete_rules:
            # Give all plugins a chance to update the data dict again (that we will test the conditions against)
            for plugin in plugin_manager.get_plugins():
                new_execute_data = plugin.add_data(current_watch_uuid=current_watch_uuid,
                                                   application_datastruct=application_datastruct,
                                                   ephemeral_data=ephemeral_data)

                if new_execute_data and isinstance(new_execute_data, dict):
                    execute_data.update(new_execute_data)

            # Create the ruleset
            ruleset = convert_to_jsonlogic(logic_operator=logic_operator, rule_dict=complete_rules)

            # Pass the custom operations dictionary to jsonLogic
            if not jsonLogic(logic=ruleset, data=execute_data, operations=CUSTOM_OPERATIONS):
                result = False

    return {'executed_data': execute_data, 'result': result}
|  | ||||
# Load plugins dynamically: merge every plugin's contributed operators,
# operator choices and field choices into the module-level registries.
for plugin in plugin_manager.get_plugins():
    contributed_ops = plugin.register_operators()
    if isinstance(contributed_ops, dict):
        CUSTOM_OPERATIONS.update(contributed_ops)

    contributed_operator_choices = plugin.register_operator_choices()
    if isinstance(contributed_operator_choices, list):
        operator_choices.extend(contributed_operator_choices)

    contributed_field_choices = plugin.register_field_choices()
    if isinstance(contributed_field_choices, list):
        field_choices.extend(contributed_field_choices)
|  | ||||
							
								
								
									
										81
									
								
								changedetectionio/conditions/blueprint.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,81 @@ | ||||
| # Flask Blueprint Definition | ||||
| import json | ||||
|  | ||||
| from flask import Blueprint | ||||
|  | ||||
| from changedetectionio.conditions import execute_ruleset_against_all_plugins | ||||
|  | ||||
|  | ||||
def construct_blueprint(datastore):
    """Build the 'conditions' Flask blueprint bound to the given datastore."""
    from changedetectionio.flask_app import login_optionally_required

    conditions_blueprint = Blueprint('conditions', __name__, template_folder="templates")

    @conditions_blueprint.route("/<string:watch_uuid>/verify-condition-single-rule", methods=['POST'])
    @login_optionally_required
    def verify_condition_single_rule(watch_uuid):
        """Verify a single condition rule against the current snapshot.

        The rule arrives as JSON in the 'rule' query-string argument; the current
        edit-form settings arrive in the POST body so all filters are applied
        before the condition is evaluated.
        """
        from changedetectionio.processors.text_json_diff import prepare_filter_prevew
        from flask import request, jsonify
        from copy import deepcopy

        ephemeral_data = {}

        # Get the watch data
        watch = datastore.data['watching'].get(watch_uuid)
        if not watch:
            return jsonify({'status': 'error', 'message': 'Watch not found'}), 404

        try:
            # prepare_filter_prevew applies all current form settings (filters,
            # triggers etc.) so the condition runs against the final output text.
            result = prepare_filter_prevew(datastore=datastore,
                                           form_data=request.form,
                                           watch_uuid=watch_uuid)

            ephemeral_data['text'] = result.get('after_filter', '')

            # Create a temporary watch data structure with this single rule
            tmp_watch_data = deepcopy(datastore.data['watching'].get(watch_uuid))

            # Override the conditions in the temporary watch.
            # NOTE(review): the rule is read from the query string even though the
            # route is POST-only — confirm against the calling UI javascript.
            rule_json = request.args.get("rule")
            rule = json.loads(rule_json) if rule_json else None

            # Should be key/value of field, operator, value
            tmp_watch_data['conditions'] = [rule]
            tmp_watch_data['conditions_match_logic'] = "ALL"  # Single rule, so use ALL

            # Create a temporary application data structure for the rule check
            temp_app_data = {
                'watching': {
                    watch_uuid: tmp_watch_data
                }
            }

            # Execute the rule against the current snapshot with form data
            result = execute_ruleset_against_all_plugins(
                current_watch_uuid=watch_uuid,
                application_datastruct=temp_app_data,
                ephemeral_data=ephemeral_data
            )

            # Bugfix: 'result' is a dict and therefore always truthy — the message
            # must reflect the boolean stored under its 'result' key, not the dict.
            passed = result.get('result')
            return jsonify({
                'status': 'success',
                'result': passed,
                'data': result.get('executed_data'),
                'message': 'Condition passes' if passed else 'Condition does not pass'
            })

        except Exception as e:
            return jsonify({
                'status': 'error',
                'message': f'Error verifying condition: {str(e)}'
            }), 500

    return conditions_blueprint
							
								
								
									
										78
									
								
								changedetectionio/conditions/default_plugin.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,78 @@ | ||||
| import re | ||||
|  | ||||
| import pluggy | ||||
| from price_parser import Price | ||||
| from loguru import logger | ||||
|  | ||||
| hookimpl = pluggy.HookimplMarker("changedetectionio_conditions") | ||||
|  | ||||
|  | ||||
@hookimpl
def register_operators():
    """Provide the built-in text and length operators for JSON Logic conditions."""

    def _ci(value):
        # Normalise an operand for case-insensitive comparison.
        return str(value).strip().lower()

    def starts_with(_, text, prefix):
        return text.lower().strip().startswith(_ci(prefix))

    def ends_with(_, text, suffix):
        return text.lower().strip().endswith(_ci(suffix))

    def length_min(_, text, strlen):
        return len(text) >= int(strlen)

    def length_max(_, text, strlen):
        return int(strlen) >= len(text)

    def contains_regex(_, text, pattern):
        """True when `text` contains `pattern` (case-insensitive regex search)."""
        return re.search(pattern, str(text), re.IGNORECASE) is not None

    def not_contains_regex(_, text, pattern):
        """True when `text` does NOT contain `pattern` (case-insensitive regex search)."""
        return re.search(pattern, str(text), re.IGNORECASE) is None

    return {
        "!contains_regex": not_contains_regex,
        "contains_regex": contains_regex,
        "ends_with": ends_with,
        "length_max": length_max,
        "length_min": length_min,
        "starts_with": starts_with,
    }
|  | ||||
@hookimpl
def register_operator_choices():
    """Expose (value, label) pairs for the operator dropdown in the rule UI."""
    choices = [
        ("starts_with", "Text Starts With"),
        ("ends_with", "Text Ends With"),
        ("length_min", "Length minimum"),
        ("length_max", "Length maximum"),
        ("contains_regex", "Text Matches Regex"),
        ("!contains_regex", "Text Does NOT Match Regex"),
    ]
    return choices
|  | ||||
@hookimpl
def register_field_choices():
    """Expose (value, label) pairs for the field dropdown in the rule UI."""
    # NOTE: page_title / meta_description / meta_keywords are not offered yet.
    return [
        ("extracted_number", "Extracted number after 'Filters & Triggers'"),
        ("page_filtered_text", "Page text after 'Filters & Triggers'"),
    ]
|  | ||||
@hookimpl
def add_data(current_watch_uuid, application_datastruct, ephemeral_data):
    """Contribute the default condition facts derived from the ephemeral text.

    :param current_watch_uuid: UUID of the watch being evaluated (unused here).
    :param application_datastruct: Application data structure (unused here).
    :param ephemeral_data: Per-run data; when it carries 'text', expose it as
        'page_filtered_text' and try to parse a price from it as 'extracted_number'.
    :return: dict of facts to merge into the JSON Logic data.
    """
    res = {}
    if 'text' in ephemeral_data:
        text = ephemeral_data['text']
        res['page_filtered_text'] = text

        # Deliberately not wrapped in try/except so the UI surfaces parse errors.
        price = Price.fromstring(text)
        # Bugfix: compare with 'is not None' (PEP 8), not '!= None'.
        if price and price.amount is not None:
            # Slightly misleading name: price_parser extracts a PRICE, not any number.
            res['extracted_number'] = float(price.amount)
            logger.debug(f"Extracted number result: '{price}' - returning float({res['extracted_number']})")

    return res
							
								
								
									
										6
									
								
								changedetectionio/conditions/exceptions.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,6 @@ | ||||
class EmptyConditionRuleRowNotUsable(Exception):
    """Raised when a 'conditions' rule row is only partially filled in."""

    _MESSAGE = "One of the 'conditions' rulesets is incomplete, cannot run."

    def __init__(self):
        super().__init__(self._MESSAGE)

    def __str__(self):
        # Render just the message, not the default tuple repr.
        return self.args[0]
							
								
								
									
										44
									
								
								changedetectionio/conditions/form.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,44 @@ | ||||
| # Condition Rule Form (for each rule row) | ||||
| from wtforms import Form, SelectField, StringField, validators | ||||
| from wtforms import validators | ||||
|  | ||||
class ConditionFormRow(Form):
    """One rule row (field / operator / value) in the conditions editor.

    A row may be left completely empty; but if ANY of the three inputs is
    filled in, all three become required (see validate()).
    """

    # ✅ Ensure Plugins Are Loaded BEFORE Importing Choices
    from changedetectionio.conditions import plugin_manager
    from changedetectionio.conditions import operator_choices, field_choices

    # The fact to test against, e.g. 'page_filtered_text' (choices come from plugins).
    field = SelectField(
        "Field",
        choices=field_choices,
        validators=[validators.Optional()]
    )

    # The JSON Logic operator name, e.g. 'contains_regex' (choices come from plugins).
    operator = SelectField(
        "Operator",
        choices=operator_choices,
        validators=[validators.Optional()]
    )

    # Right-hand operand of the condition, entered as free text.
    value = StringField("Value", validators=[validators.Optional()], render_kw={"placeholder": "A value"})

    def validate(self, extra_validators=None):
        """Return True when the row is either fully empty or fully filled in."""
        # First, run the default validators
        if not super().validate(extra_validators):
            return False

        # Custom validation logic
        # If any of the operator/field/value is set, then they must be all set.
        # NOTE(review): the string "None" is treated as unset — presumably the
        # SelectField default when nothing is chosen; confirm against the template.
        if any(value not in ("", False, "None", None) for value in [self.operator.data, self.field.data, self.value.data]):
            if not self.operator.data or self.operator.data == 'None':
                self.operator.errors.append("Operator is required.")
                return False

            if not self.field.data or self.field.data == 'None':
                self.field.errors.append("Field is required.")
                return False

            if not self.value.data:
                self.value.errors.append("Value is required.")
                return False

        return True  # Only return True if all conditions pass
							
								
								
									
										44
									
								
								changedetectionio/conditions/pluggy_interface.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,44 @@ | ||||
| import pluggy | ||||
| from . import default_plugin  # Import the default plugin | ||||
|  | ||||
| # ✅ Ensure that the namespace in HookspecMarker matches PluginManager | ||||
| PLUGIN_NAMESPACE = "changedetectionio_conditions" | ||||
|  | ||||
| hookspec = pluggy.HookspecMarker(PLUGIN_NAMESPACE) | ||||
| hookimpl = pluggy.HookimplMarker(PLUGIN_NAMESPACE) | ||||
|  | ||||
|  | ||||
class ConditionsSpec:
    """Hook specifications for extending JSON Logic conditions.

    Plugins implement any subset of these hooks as module-level functions
    decorated with the matching hookimpl marker.
    NOTE(review): the specs are declared without `self`; pluggy matches on the
    argument names only — confirm this mirrors the hookimpl signatures.
    """

    @hookspec
    def register_operators():
        """Return a dict mapping operator name -> callable for JSON Logic."""
        pass

    @hookspec
    def register_operator_choices():
        """Return a list of (value, label) operator choices for the UI."""
        pass

    @hookspec
    def register_field_choices():
        """Return a list of (value, label) field choices for the UI."""
        pass

    @hookspec
    def add_data(current_watch_uuid, application_datastruct, ephemeral_data):
        """Return a dict of facts to merge into the condition data dict."""
        pass
|  | ||||
# ✅ Set up Pluggy Plugin Manager for the conditions hook namespace
plugin_manager = pluggy.PluginManager(PLUGIN_NAMESPACE)

# ✅ Register hookspecs (Ensures they are detected)
plugin_manager.add_hookspecs(ConditionsSpec)

# ✅ Register built-in plugins manually (the bundled default operators/fields)
plugin_manager.register(default_plugin, "default_plugin")

# ✅ Discover installed plugins from external packages (if any) via setuptools
# entry points declared under the same namespace.
plugin_manager.load_setuptools_entrypoints(PLUGIN_NAMESPACE)
| @@ -4,7 +4,9 @@ from loguru import logger | ||||
| from changedetectionio.content_fetchers.exceptions import BrowserStepsStepException | ||||
| import os | ||||
|  | ||||
| visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4,header,footer,section,article,aside,details,main,nav,section,summary' | ||||
| # Visual Selector scraper - 'Button' is there because some sites have <button>OUT OF STOCK</button>. | ||||
| visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4,header,footer,section,article,aside,details,main,nav,section,summary,button' | ||||
|  | ||||
|  | ||||
| # available_fetchers() will scan this implementation looking for anything starting with html_ | ||||
| # this information is used in the form selections | ||||
|   | ||||
							
								
								
									
										104
									
								
								changedetectionio/content_fetchers/helpers.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,104 @@ | ||||
|  | ||||
| # Pages with a vertical height longer than this will use the 'stitch together' method. | ||||
|  | ||||
| # - Many GPUs have a max texture size of 16384x16384px (or lower on older devices). | ||||
| # - If a page is taller than ~8000–10000px, it risks exceeding GPU memory limits. | ||||
| # - This is especially important on headless Chromium, where Playwright may fail to allocate a massive full-page buffer. | ||||
|  | ||||
|  | ||||
| # The size at which we will switch to stitching method | ||||
| SCREENSHOT_SIZE_STITCH_THRESHOLD=8000 | ||||
|  | ||||
| from loguru import logger | ||||
|  | ||||
def capture_stitched_together_full_page(page):
    """Capture a full-page screenshot by scrolling and stitching JPEG chunks.

    Used instead of Playwright's full_page screenshot for very tall pages,
    which risk exceeding GPU texture/memory limits (see
    SCREENSHOT_SIZE_STITCH_THRESHOLD). Pages taller than MAX_TOTAL_HEIGHT are
    trimmed and a red warning banner is drawn at the top of the image.

    :param page: Playwright (sync API) page object; its viewport is temporarily
        resized per chunk and restored afterwards.
    :return: JPEG image bytes of the stitched screenshot.
    """
    import io
    import os
    import time
    from PIL import Image, ImageDraw, ImageFont

    MAX_TOTAL_HEIGHT = SCREENSHOT_SIZE_STITCH_THRESHOLD*4  # Maximum total height for the final image (When in stitch mode)
    MAX_CHUNK_HEIGHT = 4000  # Height per screenshot chunk
    WARNING_TEXT_HEIGHT = 20  # Height of the warning text overlay

    # Save the original viewport size so it can be restored in `finally`
    original_viewport = page.viewport_size
    now = time.time()

    try:
        viewport = page.viewport_size
        page_height = page.evaluate("document.documentElement.scrollHeight")

        # Limit the total capture height
        capture_height = min(page_height, MAX_TOTAL_HEIGHT)

        images = []
        total_captured_height = 0

        for offset in range(0, capture_height, MAX_CHUNK_HEIGHT):
            # Ensure we do not exceed the total height limit
            chunk_height = min(MAX_CHUNK_HEIGHT, MAX_TOTAL_HEIGHT - total_captured_height)

            # Adjust viewport size for this chunk (screenshot captures the viewport)
            page.set_viewport_size({"width": viewport["width"], "height": chunk_height})

            # Scroll to the correct position
            # NOTE(review): when the remaining page is shorter than chunk_height the
            # browser clamps the scroll at the bottom, so the last chunk may overlap
            # the previous one — confirm visually on pages whose height is not a
            # multiple of MAX_CHUNK_HEIGHT.
            page.evaluate(f"window.scrollTo(0, {offset})")

            # Capture screenshot chunk
            screenshot_bytes = page.screenshot(type='jpeg', quality=int(os.getenv("SCREENSHOT_QUALITY", 30)))
            images.append(Image.open(io.BytesIO(screenshot_bytes)))

            total_captured_height += chunk_height

            # Stop if we reached the maximum total height
            if total_captured_height >= MAX_TOTAL_HEIGHT:
                break

        # Create the final stitched image
        stitched_image = Image.new('RGB', (viewport["width"], total_captured_height))
        y_offset = 0

        # Stitch the screenshot chunks together
        for img in images:
            stitched_image.paste(img, (0, y_offset))
            y_offset += img.height

        logger.debug(f"Screenshot stitched together in {time.time()-now:.2f}s")

        # Overlay warning text if the screenshot was trimmed
        if page_height > MAX_TOTAL_HEIGHT:
            draw = ImageDraw.Draw(stitched_image)
            warning_text = f"WARNING: Screenshot was {page_height}px but trimmed to {MAX_TOTAL_HEIGHT}px because it was too long"

            # Load font (default system font if Arial is unavailable)
            try:
                font = ImageFont.truetype("arial.ttf", WARNING_TEXT_HEIGHT)  # Arial (Windows/Mac)
            except IOError:
                font = ImageFont.load_default()  # Default font if Arial not found

            # Get text bounding box (correct method for newer Pillow versions)
            text_bbox = draw.textbbox((0, 0), warning_text, font=font)
            text_width = text_bbox[2] - text_bbox[0]  # Calculate text width
            text_height = text_bbox[3] - text_bbox[1]  # Calculate text height

            # Define background rectangle (top of the image)
            draw.rectangle([(0, 0), (viewport["width"], WARNING_TEXT_HEIGHT)], fill="white")

            # Center text horizontally within the warning area
            text_x = (viewport["width"] - text_width) // 2
            text_y = (WARNING_TEXT_HEIGHT - text_height) // 2

            # Draw the warning text in red
            draw.text((text_x, text_y), warning_text, fill="red", font=font)

        # Save or return the final image
        output = io.BytesIO()
        stitched_image.save(output, format="JPEG", quality=int(os.getenv("SCREENSHOT_QUALITY", 30)))
        screenshot = output.getvalue()

    finally:
        # Restore the original viewport size
        page.set_viewport_size(original_viewport)

    return screenshot
| @@ -4,6 +4,7 @@ from urllib.parse import urlparse | ||||
|  | ||||
| from loguru import logger | ||||
|  | ||||
| from changedetectionio.content_fetchers.helpers import capture_stitched_together_full_page, SCREENSHOT_SIZE_STITCH_THRESHOLD | ||||
| from changedetectionio.content_fetchers.base import Fetcher, manage_user_agent | ||||
| from changedetectionio.content_fetchers.exceptions import PageUnloadable, Non200ErrorCodeReceived, EmptyReply, ScreenshotUnavailable | ||||
|  | ||||
| @@ -89,6 +90,7 @@ class fetcher(Fetcher): | ||||
|         from playwright.sync_api import sync_playwright | ||||
|         import playwright._impl._errors | ||||
|         from changedetectionio.content_fetchers import visualselector_xpath_selectors | ||||
|         import time | ||||
|         self.delete_browser_steps_screenshots() | ||||
|         response = None | ||||
|  | ||||
| @@ -179,6 +181,7 @@ class fetcher(Fetcher): | ||||
|  | ||||
|             self.page.wait_for_timeout(extra_wait * 1000) | ||||
|  | ||||
|             now = time.time() | ||||
|             # So we can find an element on the page where its selector was entered manually (maybe not xPath etc) | ||||
|             if current_include_filters is not None: | ||||
|                 self.page.evaluate("var include_filters={}".format(json.dumps(current_include_filters))) | ||||
| @@ -190,6 +193,8 @@ class fetcher(Fetcher): | ||||
|             self.instock_data = self.page.evaluate("async () => {" + self.instock_data_js + "}") | ||||
|  | ||||
|             self.content = self.page.content() | ||||
|             logger.debug(f"Time to scrape xpath element data in browser {time.time() - now:.2f}s") | ||||
|  | ||||
|             # Bug 3 in Playwright screenshot handling | ||||
|             # Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it | ||||
|             # JPEG is better here because the screenshots can be very very large | ||||
| @@ -199,10 +204,15 @@ class fetcher(Fetcher): | ||||
|             # acceptable screenshot quality here | ||||
|             try: | ||||
|                 # The actual screenshot - this always base64 and needs decoding! horrible! huge CPU usage | ||||
|                 self.screenshot = self.page.screenshot(type='jpeg', | ||||
|                                                        full_page=True, | ||||
|                                                        quality=int(os.getenv("SCREENSHOT_QUALITY", 72)), | ||||
|                                                        ) | ||||
|                 full_height = self.page.evaluate("document.documentElement.scrollHeight") | ||||
|  | ||||
|                 if full_height >= SCREENSHOT_SIZE_STITCH_THRESHOLD: | ||||
|                     logger.warning( | ||||
|                         f"Page full Height: {full_height}px longer than {SCREENSHOT_SIZE_STITCH_THRESHOLD}px, using 'stitched screenshot method'.") | ||||
|                     self.screenshot = capture_stitched_together_full_page(self.page) | ||||
|                 else: | ||||
|                     self.screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=int(os.getenv("SCREENSHOT_QUALITY", 30))) | ||||
|  | ||||
|             except Exception as e: | ||||
|                 # It's likely the screenshot was too long/big and something crashed | ||||
|                 raise ScreenshotUnavailable(url=url, status_code=self.status_code) | ||||
|   | ||||
| @@ -75,6 +75,7 @@ class fetcher(Fetcher): | ||||
|         self.headers = r.headers | ||||
|  | ||||
|         if not r.content or not len(r.content): | ||||
|             logger.debug(f"Requests returned empty content for '{url}'") | ||||
|             if not empty_pages_are_a_change: | ||||
|                 raise EmptyReply(url=url, status_code=r.status_code) | ||||
|             else: | ||||
|   | ||||
| @@ -29,7 +29,12 @@ function isItemInStock() { | ||||
|         'currently unavailable', | ||||
|         'dieser artikel ist bald wieder verfügbar', | ||||
|         'dostępne wkrótce', | ||||
|         'en rupture', | ||||
|         'en rupture de stock', | ||||
|         'épuisé', | ||||
|         'esgotado', | ||||
|         'indisponible', | ||||
|         'indisponível', | ||||
|         'isn\'t in stock right now', | ||||
|         'isnt in stock right now', | ||||
|         'isn’t in stock right now', | ||||
| @@ -37,6 +42,7 @@ function isItemInStock() { | ||||
|         'let me know when it\'s available', | ||||
|         'mail me when available', | ||||
|         'message if back in stock', | ||||
|         'mevcut değil', | ||||
|         'nachricht bei', | ||||
|         'nicht auf lager', | ||||
|         'nicht lagernd', | ||||
| @@ -48,7 +54,9 @@ function isItemInStock() { | ||||
|         'niet beschikbaar', | ||||
|         'niet leverbaar', | ||||
|         'niet op voorraad', | ||||
|         'no disponible temporalmente', | ||||
|         'no disponible', | ||||
|         'non disponibile', | ||||
|         'non disponible', | ||||
|         'no longer in stock', | ||||
|         'no tickets available', | ||||
|         'not available', | ||||
| @@ -57,19 +65,30 @@ function isItemInStock() { | ||||
|         'notify me when available', | ||||
|         'notify me', | ||||
|         'notify when available', | ||||
|         'não disponível', | ||||
|         'não estamos a aceitar encomendas', | ||||
|         'out of stock', | ||||
|         'out-of-stock', | ||||
|         'plus disponible', | ||||
|         'prodotto esaurito', | ||||
|         'produkt niedostępny', | ||||
|         'rupture', | ||||
|         'sold out', | ||||
|         'sold-out', | ||||
|         'stok habis', | ||||
|         'stok kosong', | ||||
|         'stok varian ini habis', | ||||
|         'stokta yok', | ||||
|         'temporarily out of stock', | ||||
|         'temporarily unavailable', | ||||
|         'there were no search results for', | ||||
|         'this item is currently unavailable', | ||||
|         'tickets unavailable', | ||||
|         'tidak dijual', | ||||
|         'tidak tersedia', | ||||
|         'tijdelijk uitverkocht', | ||||
|         'tiket tidak tersedia', | ||||
|         'tükendi', | ||||
|         'unavailable nearby', | ||||
|         'unavailable tickets', | ||||
|         'vergriffen', | ||||
| @@ -154,10 +173,14 @@ function isItemInStock() { | ||||
|         } | ||||
|  | ||||
|         elementText = ""; | ||||
|         if (element.tagName.toLowerCase() === "input") { | ||||
|             elementText = element.value.toLowerCase().trim(); | ||||
|         } else { | ||||
|             elementText = getElementBaseText(element); | ||||
|         try { | ||||
|             if (element.tagName.toLowerCase() === "input") { | ||||
|                 elementText = element.value.toLowerCase().trim(); | ||||
|             } else { | ||||
|                 elementText = getElementBaseText(element); | ||||
|             } | ||||
|         } catch (e) { | ||||
|             console.warn('stock-not-in-stock.js scraper - handling element for gettext failed', e); | ||||
|         } | ||||
|  | ||||
|         if (elementText.length) { | ||||
|   | ||||
| @@ -41,7 +41,7 @@ const findUpTag = (el) => { | ||||
|  | ||||
|     //  Strategy 1: If it's an input, with name, and there's only one, prefer that | ||||
|     if (el.name !== undefined && el.name.length) { | ||||
|         var proposed = el.tagName + "[name=" + el.name + "]"; | ||||
|         var proposed = el.tagName + "[name=\"" + CSS.escape(el.name) + "\"]"; | ||||
|         var proposed_element = window.document.querySelectorAll(proposed); | ||||
|         if (proposed_element.length) { | ||||
|             if (proposed_element.length === 1) { | ||||
| @@ -102,13 +102,15 @@ function collectVisibleElements(parent, visibleElements) { | ||||
|     const children = parent.children; | ||||
|     for (let i = 0; i < children.length; i++) { | ||||
|         const child = children[i]; | ||||
|         const computedStyle = window.getComputedStyle(child); | ||||
|  | ||||
|         if ( | ||||
|             child.nodeType === Node.ELEMENT_NODE && | ||||
|             window.getComputedStyle(child).display !== 'none' && | ||||
|             window.getComputedStyle(child).visibility !== 'hidden' && | ||||
|             computedStyle.display !== 'none' && | ||||
|             computedStyle.visibility !== 'hidden' && | ||||
|             child.offsetWidth >= 0 && | ||||
|             child.offsetHeight >= 0 && | ||||
|             window.getComputedStyle(child).contentVisibility !== 'hidden' | ||||
|             computedStyle.contentVisibility !== 'hidden' | ||||
|         ) { | ||||
|             // If the child is an element and is visible, recursively collect visible elements | ||||
|             collectVisibleElements(child, visibleElements); | ||||
| @@ -173,6 +175,7 @@ visibleElementsArray.forEach(function (element) { | ||||
|  | ||||
|     // Try to identify any possible currency amounts "Sale: 4000" or "Sale now 3000 Kc", can help with the training. | ||||
|     const hasDigitCurrency = (/\d/.test(text.slice(0, 6)) || /\d/.test(text.slice(-6)) ) &&  /([€£$¥₩₹]|USD|AUD|EUR|Kč|kr|SEK|,–)/.test(text) ; | ||||
|     const computedStyle = window.getComputedStyle(element); | ||||
|  | ||||
|     size_pos.push({ | ||||
|         xpath: xpath_result, | ||||
| @@ -184,10 +187,10 @@ visibleElementsArray.forEach(function (element) { | ||||
|         tagName: (element.tagName) ? element.tagName.toLowerCase() : '', | ||||
|         // tagtype used by Browser Steps | ||||
|         tagtype: (element.tagName.toLowerCase() === 'input' && element.type) ? element.type.toLowerCase() : '', | ||||
|         isClickable: window.getComputedStyle(element).cursor === "pointer", | ||||
|         isClickable: computedStyle.cursor === "pointer", | ||||
|         // Used by the keras trainer | ||||
|         fontSize: window.getComputedStyle(element).getPropertyValue('font-size'), | ||||
|         fontWeight: window.getComputedStyle(element).getPropertyValue('font-weight'), | ||||
|         fontSize: computedStyle.getPropertyValue('font-size'), | ||||
|         fontWeight: computedStyle.getPropertyValue('font-weight'), | ||||
|         hasDigitCurrency: hasDigitCurrency, | ||||
|         label: label, | ||||
|     }); | ||||
|   | ||||
| @@ -1,6 +1,9 @@ | ||||
| import difflib | ||||
| from typing import List, Iterator, Union | ||||
|  | ||||
| REMOVED_STYLE = "background-color: #fadad7; color: #b30000;" | ||||
| ADDED_STYLE = "background-color: #eaf2c2; color: #406619;" | ||||
|  | ||||
| def same_slicer(lst: List[str], start: int, end: int) -> List[str]: | ||||
|     """Return a slice of the list, or a single element if start == end.""" | ||||
|     return lst[start:end] if start != end else [lst[start]] | ||||
| @@ -12,11 +15,12 @@ def customSequenceMatcher( | ||||
|     include_removed: bool = True, | ||||
|     include_added: bool = True, | ||||
|     include_replaced: bool = True, | ||||
|     include_change_type_prefix: bool = True | ||||
|     include_change_type_prefix: bool = True, | ||||
|     html_colour: bool = False | ||||
| ) -> Iterator[List[str]]: | ||||
|     """ | ||||
|     Compare two sequences and yield differences based on specified parameters. | ||||
|      | ||||
|  | ||||
|     Args: | ||||
|         before (List[str]): Original sequence | ||||
|         after (List[str]): Modified sequence | ||||
| @@ -25,26 +29,35 @@ def customSequenceMatcher( | ||||
|         include_added (bool): Include added parts | ||||
|         include_replaced (bool): Include replaced parts | ||||
|         include_change_type_prefix (bool): Add prefixes to indicate change types | ||||
|      | ||||
|         html_colour (bool): Use HTML background colors for differences | ||||
|  | ||||
|     Yields: | ||||
|         List[str]: Differences between sequences | ||||
|     """ | ||||
|     cruncher = difflib.SequenceMatcher(isjunk=lambda x: x in " \t", a=before, b=after) | ||||
|      | ||||
|  | ||||
|  | ||||
|  | ||||
|     for tag, alo, ahi, blo, bhi in cruncher.get_opcodes(): | ||||
|         if include_equal and tag == 'equal': | ||||
|             yield before[alo:ahi] | ||||
|         elif include_removed and tag == 'delete': | ||||
|             prefix = "(removed) " if include_change_type_prefix else '' | ||||
|             yield [f"{prefix}{line}" for line in same_slicer(before, alo, ahi)] | ||||
|             if html_colour: | ||||
|                 yield [f'<span style="{REMOVED_STYLE}">{line}</span>' for line in same_slicer(before, alo, ahi)] | ||||
|             else: | ||||
|                 yield [f"(removed) {line}" for line in same_slicer(before, alo, ahi)] if include_change_type_prefix else same_slicer(before, alo, ahi) | ||||
|         elif include_replaced and tag == 'replace': | ||||
|             prefix_changed = "(changed) " if include_change_type_prefix else '' | ||||
|             prefix_into = "(into) " if include_change_type_prefix else '' | ||||
|             yield [f"{prefix_changed}{line}" for line in same_slicer(before, alo, ahi)] + \ | ||||
|                   [f"{prefix_into}{line}" for line in same_slicer(after, blo, bhi)] | ||||
|             if html_colour: | ||||
|                 yield [f'<span style="{REMOVED_STYLE}">{line}</span>' for line in same_slicer(before, alo, ahi)] + \ | ||||
|                       [f'<span style="{ADDED_STYLE}">{line}</span>' for line in same_slicer(after, blo, bhi)] | ||||
|             else: | ||||
|                 yield [f"(changed) {line}" for line in same_slicer(before, alo, ahi)] + \ | ||||
|                       [f"(into) {line}" for line in same_slicer(after, blo, bhi)] if include_change_type_prefix else same_slicer(before, alo, ahi) + same_slicer(after, blo, bhi) | ||||
|         elif include_added and tag == 'insert': | ||||
|             prefix = "(added) " if include_change_type_prefix else '' | ||||
|             yield [f"{prefix}{line}" for line in same_slicer(after, blo, bhi)] | ||||
|             if html_colour: | ||||
|                 yield [f'<span style="{ADDED_STYLE}">{line}</span>' for line in same_slicer(after, blo, bhi)] | ||||
|             else: | ||||
|                 yield [f"(added) {line}" for line in same_slicer(after, blo, bhi)] if include_change_type_prefix else same_slicer(after, blo, bhi) | ||||
|  | ||||
| def render_diff( | ||||
|     previous_version_file_contents: str, | ||||
| @@ -55,11 +68,12 @@ def render_diff( | ||||
|     include_replaced: bool = True, | ||||
|     line_feed_sep: str = "\n", | ||||
|     include_change_type_prefix: bool = True, | ||||
|     patch_format: bool = False | ||||
|     patch_format: bool = False, | ||||
|     html_colour: bool = False | ||||
| ) -> str: | ||||
|     """ | ||||
|     Render the difference between two file contents. | ||||
|      | ||||
|  | ||||
|     Args: | ||||
|         previous_version_file_contents (str): Original file contents | ||||
|         newest_version_file_contents (str): Modified file contents | ||||
| @@ -70,7 +84,8 @@ def render_diff( | ||||
|         line_feed_sep (str): Separator for lines in output | ||||
|         include_change_type_prefix (bool): Add prefixes to indicate change types | ||||
|         patch_format (bool): Use patch format for output | ||||
|      | ||||
|         html_colour (bool): Use HTML background colors for differences | ||||
|  | ||||
|     Returns: | ||||
|         str: Rendered difference | ||||
|     """ | ||||
| @@ -88,10 +103,11 @@ def render_diff( | ||||
|         include_removed=include_removed, | ||||
|         include_added=include_added, | ||||
|         include_replaced=include_replaced, | ||||
|         include_change_type_prefix=include_change_type_prefix | ||||
|         include_change_type_prefix=include_change_type_prefix, | ||||
|         html_colour=html_colour | ||||
|     ) | ||||
|  | ||||
|     def flatten(lst: List[Union[str, List[str]]]) -> str: | ||||
|         return line_feed_sep.join(flatten(x) if isinstance(x, list) else x for x in lst) | ||||
|  | ||||
|     return flatten(rendered_diff) | ||||
|     return flatten(rendered_diff) | ||||
| @@ -1,11 +1,16 @@ | ||||
| import os | ||||
| import re | ||||
| from loguru import logger | ||||
| from wtforms.widgets.core import TimeInput | ||||
|  | ||||
| from changedetectionio.blueprint.rss import RSS_FORMAT_TYPES | ||||
| from changedetectionio.conditions.form import ConditionFormRow | ||||
| from changedetectionio.strtobool import strtobool | ||||
|  | ||||
| from wtforms import ( | ||||
|     BooleanField, | ||||
|     Form, | ||||
|     Field, | ||||
|     IntegerField, | ||||
|     RadioField, | ||||
|     SelectField, | ||||
| @@ -124,6 +129,87 @@ class StringTagUUID(StringField): | ||||
|  | ||||
|         return 'error' | ||||
|  | ||||
class TimeDurationForm(Form):
    """Duration picker made of two dropdowns: hours (0-24) and minutes (0-59)."""
    hours = SelectField(choices=[(str(i), str(i)) for i in range(25)], default="24", validators=[validators.Optional()])
    minutes = SelectField(choices=[(str(i), str(i)) for i in range(60)], default="00", validators=[validators.Optional()])
|  | ||||
class TimeStringField(Field):
    """
    A WTForms field for HH:MM time input whose value is kept as a plain string.
    """
    # Render with the browser-native <input type="time"> widget
    widget = TimeInput()

    def _value(self):
        """Return the value used when the field is rendered back into the form."""
        return "" if self.data is None else self.data

    def process_formdata(self, valuelist):
        """Validate the raw submitted value and store it as a string.

        Raises:
            ValidationError: if the value is not in HH:MM form.
        """
        if not valuelist:
            return
        time_str = valuelist[0]
        # Minimal structural check: non-empty, with exactly one ':' separator
        if not time_str or len(time_str.split(":")) != 2:
            raise ValidationError("Invalid time format. Use HH:MM.")
        self.data = time_str
|  | ||||
|  | ||||
class validateTimeZoneName(object):
    """
    Validator that rejects any value which is not a known IANA timezone name.

    Implemented as a plain callable class because Flask wtform validators
    won't work with basic auth.
    """

    def __init__(self, message=None):
        # Custom message kept for validator-API parity (currently unused).
        self.message = message

    def __call__(self, form, field):
        from zoneinfo import available_timezones

        # Empty/None values are allowed; only non-empty input is checked.
        if field.data and field.data not in available_timezones():
            raise ValidationError("Not a valid timezone name")
|  | ||||
class ScheduleLimitDaySubForm(Form):
    """Per-day schedule settings: an on/off toggle, a start time and a run duration."""
    # The "not set" label text is replaced by the parent form's __init__.
    enabled = BooleanField("not set", default=True)
    start_time = TimeStringField("Start At", default="00:00", validators=[validators.Optional()])
    duration = FormField(TimeDurationForm, label="Run duration")
|  | ||||
class ScheduleLimitForm(Form):
    """Weekly run-window scheduler: one ScheduleLimitDaySubForm per weekday plus a timezone."""

    enabled = BooleanField("Use time scheduler", default=False)

    # Labels are left empty because the label for="" doesnt line up/work with
    # the actual checkbox; the visible text is set on each day's 'enabled'
    # field in __init__ below.
    monday = FormField(ScheduleLimitDaySubForm, label="")
    tuesday = FormField(ScheduleLimitDaySubForm, label="")
    wednesday = FormField(ScheduleLimitDaySubForm, label="")
    thursday = FormField(ScheduleLimitDaySubForm, label="")
    friday = FormField(ScheduleLimitDaySubForm, label="")
    saturday = FormField(ScheduleLimitDaySubForm, label="")
    sunday = FormField(ScheduleLimitDaySubForm, label="")

    timezone = StringField("Optional timezone to run in",
                           render_kw={"list": "timezones"},
                           validators=[validateTimeZoneName()])

    def __init__(self, formdata=None, obj=None, prefix="", data=None, meta=None, **kwargs):
        super().__init__(formdata, obj, prefix, data, meta, **kwargs)
        # Give each day's checkbox a human-readable label ("Monday", "Tuesday", ...)
        for day_name in ("monday", "tuesday", "wednesday", "thursday",
                         "friday", "saturday", "sunday"):
            getattr(self, day_name).form.enabled.label.text = day_name.capitalize()
|  | ||||
|  | ||||
| class TimeBetweenCheckForm(Form): | ||||
|     weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")]) | ||||
|     days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")]) | ||||
| @@ -220,12 +306,18 @@ class ValidateAppRiseServers(object): | ||||
|  | ||||
|     def __call__(self, form, field): | ||||
|         import apprise | ||||
|         apobj = apprise.Apprise() | ||||
|         # so that the custom endpoints are registered | ||||
|         from changedetectionio.apprise_plugin import apprise_custom_api_call_wrapper | ||||
|         from .apprise_plugin.assets import apprise_asset | ||||
|         from .apprise_plugin.custom_handlers import apprise_http_custom_handler  # noqa: F401 | ||||
|  | ||||
|         apobj = apprise.Apprise(asset=apprise_asset) | ||||
|  | ||||
|         for server_url in field.data: | ||||
|             if not apobj.add(server_url): | ||||
|                 message = field.gettext('\'%s\' is not a valid AppRise URL.' % (server_url)) | ||||
|             url = server_url.strip() | ||||
|             if url.startswith("#"): | ||||
|                 continue | ||||
|  | ||||
|             if not apobj.add(url): | ||||
|                 message = field.gettext('\'%s\' is not a valid AppRise URL.' % (url)) | ||||
|                 raise ValidationError(message) | ||||
|  | ||||
| class ValidateJinja2Template(object): | ||||
| @@ -278,6 +370,7 @@ class validateURL(object): | ||||
|         # This should raise a ValidationError() or not | ||||
|         validate_url(field.data) | ||||
|  | ||||
|  | ||||
| def validate_url(test_url): | ||||
|     # If hosts that only contain alphanumerics are allowed ("localhost" for example) | ||||
|     try: | ||||
| @@ -420,6 +513,7 @@ class quickWatchForm(Form): | ||||
|     edit_and_watch_submit_button = SubmitField('Edit > Watch', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|  | ||||
|  | ||||
|  | ||||
| # Common to a single watch and the global settings | ||||
| class commonSettingsForm(Form): | ||||
|     from . import processors | ||||
| @@ -437,6 +531,7 @@ class commonSettingsForm(Form): | ||||
|     notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()]) | ||||
|     notification_urls = StringListField('Notification URL List', validators=[validators.Optional(), ValidateAppRiseServers(), ValidateJinja2Template()]) | ||||
|     processor = RadioField( label=u"Processor - What do you want to achieve?", choices=processors.available_processors(), default="text_json_diff") | ||||
|     timezone = StringField("Timezone for watch schedule", render_kw={"list": "timezones"}, validators=[validateTimeZoneName()]) | ||||
|     webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, message="Should contain one or more seconds")]) | ||||
|  | ||||
|  | ||||
| @@ -447,7 +542,6 @@ class importForm(Form): | ||||
|     xlsx_file = FileField('Upload .xlsx file', validators=[FileAllowed(['xlsx'], 'Must be .xlsx file!')]) | ||||
|     file_mapping = SelectField('File mapping', [validators.DataRequired()], choices={('wachete', 'Wachete mapping'), ('custom','Custom mapping')}) | ||||
|  | ||||
|  | ||||
| class SingleBrowserStep(Form): | ||||
|  | ||||
|     operation = SelectField('Operation', [validators.Optional()], choices=browser_step_ui_config.keys()) | ||||
| @@ -465,6 +559,9 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|     tags = StringTagUUID('Group tag', [validators.Optional()], default='') | ||||
|  | ||||
|     time_between_check = FormField(TimeBetweenCheckForm) | ||||
|  | ||||
|     time_schedule_limit = FormField(ScheduleLimitForm) | ||||
|  | ||||
|     time_between_check_use_default = BooleanField('Use global settings for time between check', default=False) | ||||
|  | ||||
|     include_filters = StringListField('CSS/JSONPath/JQ/XPath Filters', [ValidateCSSJSONXPATHInput()], default='') | ||||
| @@ -475,7 +572,7 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|  | ||||
|     title = StringField('Title', default='') | ||||
|  | ||||
|     ignore_text = StringListField('Ignore text', [ValidateListRegex()]) | ||||
|     ignore_text = StringListField('Ignore lines containing', [ValidateListRegex()]) | ||||
|     headers = StringDictKeyValue('Request headers') | ||||
|     body = TextAreaField('Request body', [validators.Optional()]) | ||||
|     method = SelectField('Request method', choices=valid_method, default=default_method) | ||||
| @@ -495,7 +592,7 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|     text_should_not_be_present = StringListField('Block change-detection while text matches', [validators.Optional(), ValidateListRegex()]) | ||||
|     webdriver_js_execute_code = TextAreaField('Execute JavaScript before change detection', render_kw={"rows": "5"}, validators=[validators.Optional()]) | ||||
|  | ||||
|     save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|     save_button = SubmitField('Save', render_kw={"class": "pure-button button-small pure-button-primary"}) | ||||
|  | ||||
|     proxy = RadioField('Proxy') | ||||
|     filter_failure_notification_send = BooleanField( | ||||
| @@ -504,6 +601,10 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|     notification_muted = BooleanField('Notifications Muted / Off', default=False) | ||||
|     notification_screenshot = BooleanField('Attach screenshot to notification (where possible)', default=False) | ||||
|  | ||||
|     conditions_match_logic = RadioField(u'Match', choices=[('ALL', 'Match all of the following'),('ANY', 'Match any of the following')], default='ALL') | ||||
|     conditions = FieldList(FormField(ConditionFormRow), min_entries=1)  # Add rule logic here | ||||
|  | ||||
|  | ||||
|     def extra_tab_content(self): | ||||
|         return None | ||||
|  | ||||
| @@ -514,6 +615,7 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|         if not super().validate(): | ||||
|             return False | ||||
|  | ||||
|         from changedetectionio.safe_jinja import render as jinja_render | ||||
|         result = True | ||||
|  | ||||
|         # Fail form validation when a body is set for a GET | ||||
| @@ -523,13 +625,65 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|  | ||||
|         # Attempt to validate jinja2 templates in the URL | ||||
|         try: | ||||
|             from changedetectionio.safe_jinja import render as jinja_render | ||||
|             jinja_render(template_str=self.url.data) | ||||
|         except Exception as e: | ||||
|             self.url.errors.append('Invalid template syntax') | ||||
|         except ModuleNotFoundError as e: | ||||
|             # incase jinja2_time or others is missing | ||||
|             logger.error(e) | ||||
|             self.url.errors.append(f'Invalid template syntax configuration: {e}') | ||||
|             result = False | ||||
|         except Exception as e: | ||||
|             logger.error(e) | ||||
|             self.url.errors.append(f'Invalid template syntax: {e}') | ||||
|             result = False | ||||
|  | ||||
|         # Attempt to validate jinja2 templates in the body | ||||
|         if self.body.data and self.body.data.strip(): | ||||
|             try: | ||||
|                 jinja_render(template_str=self.body.data) | ||||
|             except ModuleNotFoundError as e: | ||||
|                 # incase jinja2_time or others is missing | ||||
|                 logger.error(e) | ||||
|                 self.body.errors.append(f'Invalid template syntax configuration: {e}') | ||||
|                 result = False | ||||
|             except Exception as e: | ||||
|                 logger.error(e) | ||||
|                 self.body.errors.append(f'Invalid template syntax: {e}') | ||||
|                 result = False | ||||
|  | ||||
|         # Attempt to validate jinja2 templates in the headers | ||||
|         if len(self.headers.data) > 0: | ||||
|             try: | ||||
|                 for header, value in self.headers.data.items(): | ||||
|                     jinja_render(template_str=value) | ||||
|             except ModuleNotFoundError as e: | ||||
|                 # incase jinja2_time or others is missing | ||||
|                 logger.error(e) | ||||
|                 self.headers.errors.append(f'Invalid template syntax configuration: {e}') | ||||
|                 result = False | ||||
|             except Exception as e: | ||||
|                 logger.error(e) | ||||
|                 self.headers.errors.append(f'Invalid template syntax in "{header}" header: {e}') | ||||
|                 result = False | ||||
|  | ||||
|         return result | ||||
|  | ||||
|     def __init__( | ||||
|             self, | ||||
|             formdata=None, | ||||
|             obj=None, | ||||
|             prefix="", | ||||
|             data=None, | ||||
|             meta=None, | ||||
|             **kwargs, | ||||
|     ): | ||||
|         super().__init__(formdata, obj, prefix, data, meta, **kwargs) | ||||
|         if kwargs and kwargs.get('default_system_settings'): | ||||
|             default_tz = kwargs.get('default_system_settings').get('application', {}).get('timezone') | ||||
|             if default_tz: | ||||
|                 self.time_schedule_limit.form.timezone.render_kw['placeholder'] = default_tz | ||||
|  | ||||
|  | ||||
|  | ||||
| class SingleExtraProxy(Form): | ||||
|  | ||||
|     # maybe better to set some <script>var.. | ||||
| @@ -550,6 +704,7 @@ class DefaultUAInputForm(Form): | ||||
| # datastore.data['settings']['requests'].. | ||||
| class globalSettingsRequestForm(Form): | ||||
|     time_between_check = FormField(TimeBetweenCheckForm) | ||||
|     time_schedule_limit = FormField(ScheduleLimitForm) | ||||
|     proxy = RadioField('Proxy') | ||||
|     jitter_seconds = IntegerField('Random jitter seconds ± check', | ||||
|                                   render_kw={"style": "width: 5em;"}, | ||||
| @@ -585,6 +740,9 @@ class globalSettingsApplicationForm(commonSettingsForm): | ||||
|                               render_kw={"style": "width: 5em;"}, | ||||
|                               validators=[validators.NumberRange(min=0, | ||||
|                                                                  message="Should be atleast zero (disabled)")]) | ||||
|  | ||||
|     rss_content_format = SelectField('RSS Content format', choices=RSS_FORMAT_TYPES) | ||||
|  | ||||
|     removepassword_button = SubmitField('Remove password', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|     render_anchor_tag_content = BooleanField('Render anchor tag content', default=False) | ||||
|     shared_diff_access = BooleanField('Allow access to view diff page when password is enabled', default=False, validators=[validators.Optional()]) | ||||
| @@ -608,7 +766,7 @@ class globalSettingsForm(Form): | ||||
|  | ||||
|     requests = FormField(globalSettingsRequestForm) | ||||
|     application = FormField(globalSettingsApplicationForm) | ||||
|     save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|     save_button = SubmitField('Save', render_kw={"class": "pure-button button-small pure-button-primary"}) | ||||
|  | ||||
|  | ||||
| class extractDataForm(Form): | ||||
|   | ||||
							
								
								
									
										162
									
								
								changedetectionio/gc_cleanup.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,162 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import ctypes | ||||
| import gc | ||||
| import re | ||||
| import psutil | ||||
| import sys | ||||
| import threading | ||||
| import importlib | ||||
| from loguru import logger | ||||
|  | ||||
def memory_cleanup(app=None):
    """
    Perform comprehensive memory cleanup operations and log memory usage
    at each step with nicely formatted numbers.

    Args:
        app: Optional Flask app instance for clearing Flask-specific caches

    Returns:
        str: Status message ("cleaned")
    """
    # Get current process
    process = psutil.Process()

    def log_memory(stage):
        # Helper: log the current RSS in MB with thousands separators.
        current = process.memory_info().rss / 1024 / 1024
        logger.debug(f"{stage} - Memory usage: {current:,.2f} MB")

    def clear_tracked_objects(class_names):
        # Helper: call .clear() on every gc-tracked object whose class name
        # is in class_names, swallowing per-object failures.
        for obj in gc.get_objects():
            cls = getattr(obj, '__class__', None)
            if cls is not None and getattr(cls, '__name__', None) in class_names and hasattr(obj, 'clear'):
                try:
                    obj.clear()
                except (AttributeError, TypeError):
                    pass

    current_memory = process.memory_info().rss / 1024 / 1024
    logger.debug(f"Memory cleanup started - Current memory usage: {current_memory:,.2f} MB")

    # 1. Standard garbage collection - force full collection on all generations
    gc.collect(0)  # Collect youngest generation
    gc.collect(1)  # Collect middle generation
    gc.collect(2)  # Collect oldest generation

    # Run full collection again to ensure maximum cleanup
    gc.collect()
    log_memory("After full gc.collect()")

    # 3. Call libc's malloc_trim to release memory back to the OS.
    # malloc_trim only exists under glibc; loading "libc.so.6" raises OSError
    # on macOS/Windows/musl, so skip gracefully instead of aborting cleanup.
    try:
        libc = ctypes.CDLL("libc.so.6")
    except OSError:
        libc = None
        logger.debug("malloc_trim not available on this platform")
    if libc is not None:
        libc.malloc_trim(0)
        log_memory("After malloc_trim(0)")

    # 4. Clear Python's regex cache
    re.purge()
    log_memory("After re.purge()")

    # 5. Reset thread-local storage
    # Create a new thread local object to encourage cleanup of old ones
    threading.local()
    log_memory("After threading.local()")

    # 6. Clear sys.intern cache if the interpreter supports it
    # (CPython's sys.intern has no clear(); the AttributeError path is normal)
    try:
        sys.intern.clear()
        log_memory("After sys.intern.clear()")
    except (AttributeError, TypeError):
        logger.debug("sys.intern.clear() not supported in this Python version")

    # 7. Clear XML/lxml caches if available
    try:
        # Check if lxml.etree is in use
        lxml_etree = sys.modules.get('lxml.etree')
        if lxml_etree:
            # Clear module-level caches
            if hasattr(lxml_etree, 'clear_error_log'):
                lxml_etree.clear_error_log()

            # Clear error logs, plus Element objects which can hold
            # references to whole documents
            clear_tracked_objects(('_ErrorLog', '_RotatingErrorLog', '_DomainErrorLog',
                                   '_Element', 'ElementBase'))
            log_memory("After lxml.etree cleanup")

        # Check if lxml.html is in use
        lxml_html = sys.modules.get('lxml.html')
        if lxml_html:
            # Clear HTML-specific element types
            clear_tracked_objects(('HtmlElement', 'FormElement', 'InputElement',
                                   'SelectElement', 'TextareaElement', 'CheckboxGroup',
                                   'RadioGroup', 'MultipleSelectOptions', 'FieldsDict'))
            log_memory("After lxml.html cleanup")
    except (ImportError, AttributeError):
        logger.debug("lxml cleanup not applicable")

    # 8. Clear JSON parser caches if applicable
    # (in CPython _default_encoder.markers is usually None, so the
    # AttributeError path is expected)
    try:
        json_module = sys.modules.get('json')
        if json_module and hasattr(json_module, '_default_encoder'):
            json_module._default_encoder.markers.clear()
            log_memory("After JSON parser cleanup")
    except (AttributeError, KeyError):
        logger.debug("JSON cleanup not applicable")

    # 9. Force Python's memory allocator to release unused memory
    try:
        if hasattr(sys, 'pypy_version_info'):
            # PyPy has different memory management
            gc.collect()
        else:
            # CPython - try to release unused memory
            ctypes.pythonapi.PyGC_Collect()
            log_memory("After PyGC_Collect")
    except (AttributeError, TypeError):
        logger.debug("PyGC_Collect not supported")

    # 10. Clear Flask-specific caches if applicable
    if app:
        try:
            # Clear Flask caches if they exist
            for key in list(app.config.get('_cache', {}).keys()):
                app.config['_cache'].pop(key, None)

            # Clear Jinja2 template cache if available
            if hasattr(app, 'jinja_env') and hasattr(app.jinja_env, 'cache'):
                app.jinja_env.cache.clear()

            log_memory("After Flask cache clear")
        except (AttributeError, KeyError):
            logger.debug("No Flask cache to clear")

    # Final garbage collection pass
    gc.collect()
    if libc is not None:
        libc.malloc_trim(0)

    # Log final memory usage
    final_memory = process.memory_info().rss / 1024 / 1024
    logger.info(f"Memory cleanup completed - Final memory usage: {final_memory:,.2f} MB")
    return "cleaned"
| @@ -1,13 +1,14 @@ | ||||
| from typing import List | ||||
| from loguru import logger | ||||
| from lxml import etree | ||||
| from typing import List | ||||
| import json | ||||
| import re | ||||
|  | ||||
|  | ||||
| # HTML added to be sure each result matching a filter (.example) gets converted to a new line by Inscriptis | ||||
| TEXT_FILTER_LIST_LINE_SUFFIX = "<br>" | ||||
|  | ||||
| TRANSLATE_WHITESPACE_TABLE = str.maketrans('', '', '\r\n\t ') | ||||
| PERL_STYLE_REGEX = r'^/(.*?)/([a-z]*)?$' | ||||
|  | ||||
| # 'price' , 'lowPrice', 'highPrice' are usually under here | ||||
| # All of those may or may not appear on different websites - I didnt find a way todo case-insensitive searching here | ||||
| LD_JSON_PRODUCT_OFFER_SELECTORS = ["json:$..offers", "json:$..Offers"] | ||||
| @@ -54,29 +55,64 @@ def include_filters(include_filters, html_content, append_pretty_line_formatting | ||||
def subtractive_css_selector(css_selector, html_content):
    """Remove all elements matching *css_selector* from *html_content* and
    return the resulting HTML string."""
    from bs4 import BeautifulSoup
    soup = BeautifulSoup(html_content, "html.parser")

    # So that the elements dont shift their index, build a list of elements
    # here which will be pointers to their place in the DOM
    elements_to_remove = soup.select(css_selector)

    # Then, remove them in a separate loop
    for item in elements_to_remove:
        item.decompose()

    return str(soup)
|  | ||||
def subtractive_xpath_selector(selectors: List[str], html_content: str) -> str:
    """Remove all elements matching any of the given XPath *selectors* from
    *html_content* and return the resulting HTML string."""
    # Parse the HTML content using lxml
    html_tree = etree.HTML(html_content)

    # First, collect all elements to remove (collecting before removing means
    # later lookups can't be invalidated by earlier removals)
    elements_to_remove = []
    for selector in selectors:
        elements_to_remove.extend(html_tree.xpath(selector))

    # Then, remove them in a separate loop
    for element in elements_to_remove:
        # Ensure the element has a parent before removing (it may already
        # have been detached by a previous selector match)
        if element.getparent() is not None:
            element.getparent().remove(element)

    # Convert the modified HTML tree back to a string
    return etree.tostring(html_tree, method="html").decode("utf-8")
|  | ||||
|  | ||||
def element_removal(selectors: List[str], html_content):
    """Removes elements that match a list of CSS or XPath selectors.

    XPath selectors (prefixed 'xpath:'/'xpath1:' or starting with '//') are
    batched into one subtractive_xpath_selector pass; all CSS selectors are
    combined into a single comma-joined selector so element indexes cannot
    shift between removals.
    """
    modified_html = html_content
    css_selectors = []
    xpath_selectors = []

    for selector in selectors:
        if selector.startswith(('xpath:', 'xpath1:', '//')):
            # Strip the scheme prefix; bare '//...' passes through unchanged
            xpath_selectors.append(selector.removeprefix('xpath:').removeprefix('xpath1:'))
        else:
            # Collect CSS selectors as one "hit", see comment in subtractive_css_selector
            css_selectors.append(selector.strip().strip(","))

    if xpath_selectors:
        modified_html = subtractive_xpath_selector(xpath_selectors, modified_html)

    if css_selectors:
        # Remove duplicates, then combine all CSS selectors into one string,
        # separated by commas. This stops the elements index shifting.
        unique_selectors = list(set(css_selectors))  # Ensure uniqueness
        combined_css_selector = " , ".join(unique_selectors)
        modified_html = subtractive_css_selector(combined_css_selector, modified_html)

    return modified_html
|  | ||||
| def elementpath_tostring(obj): | ||||
| @@ -263,8 +299,10 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None | ||||
| # https://github.com/dgtlmoon/changedetection.io/pull/2041#issuecomment-1848397161w | ||||
|     # Try to parse/filter out the JSON, if we get some parser error, then maybe it's embedded within HTML tags | ||||
|     try: | ||||
|         stripped_text_from_html = _parse_json(json.loads(content), json_filter) | ||||
|     except json.JSONDecodeError: | ||||
|         # .lstrip("\ufeff") strings ByteOrderMark from UTF8 and still lets the UTF work | ||||
|         stripped_text_from_html = _parse_json(json.loads(content.lstrip("\ufeff") ), json_filter) | ||||
|     except json.JSONDecodeError as e: | ||||
|         logger.warning(str(e)) | ||||
|  | ||||
|         # Foreach <script json></script> blob.. just return the first that matches json_filter | ||||
|         # As a last resort, try to parse the whole <body> | ||||
| @@ -326,6 +364,7 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None | ||||
| #          - "line numbers" return a list of line numbers that match (int list) | ||||
| # | ||||
| # wordlist - list of regex's (str) or words (str) | ||||
| # Preserves all linefeeds and other whitespacing, its not the job of this to remove that | ||||
| def strip_ignore_text(content, wordlist, mode="content"): | ||||
|     i = 0 | ||||
|     output = [] | ||||
| @@ -341,32 +380,30 @@ def strip_ignore_text(content, wordlist, mode="content"): | ||||
|         else: | ||||
|             ignore_text.append(k.strip()) | ||||
|  | ||||
|     for line in content.splitlines(): | ||||
|     for line in content.splitlines(keepends=True): | ||||
|         i += 1 | ||||
|         # Always ignore blank lines in this mode. (when this function gets called) | ||||
|         got_match = False | ||||
|         if len(line.strip()): | ||||
|             for l in ignore_text: | ||||
|                 if l.lower() in line.lower(): | ||||
|         for l in ignore_text: | ||||
|             if l.lower() in line.lower(): | ||||
|                 got_match = True | ||||
|  | ||||
|         if not got_match: | ||||
|             for r in ignore_regex: | ||||
|                 if r.search(line): | ||||
|                     got_match = True | ||||
|  | ||||
|             if not got_match: | ||||
|                 for r in ignore_regex: | ||||
|                     if r.search(line): | ||||
|                         got_match = True | ||||
|  | ||||
|             if not got_match: | ||||
|                 # Not ignored | ||||
|                 output.append(line.encode('utf8')) | ||||
|             else: | ||||
|                 ignored_line_numbers.append(i) | ||||
|  | ||||
|         if not got_match: | ||||
|             # Not ignored, and should preserve "keepends" | ||||
|             output.append(line) | ||||
|         else: | ||||
|             ignored_line_numbers.append(i) | ||||
|  | ||||
|     # Used for finding out what to highlight | ||||
|     if mode == "line numbers": | ||||
|         return ignored_line_numbers | ||||
|  | ||||
|     return "\n".encode('utf8').join(output) | ||||
|     return ''.join(output) | ||||
|  | ||||
| def cdata_in_document_to_text(html_content: str, render_anchor_tag_content=False) -> str: | ||||
|     from xml.sax.saxutils import escape as xml_escape | ||||
|   | ||||
| @@ -1,4 +1,7 @@ | ||||
| from os import getenv | ||||
|  | ||||
| from changedetectionio.blueprint.rss import RSS_FORMAT_TYPES | ||||
|  | ||||
| from changedetectionio.notification import ( | ||||
|     default_notification_body, | ||||
|     default_notification_format, | ||||
| @@ -9,6 +12,8 @@ from changedetectionio.notification import ( | ||||
| _FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT = 6 | ||||
| DEFAULT_SETTINGS_HEADERS_USERAGENT='Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36' | ||||
|  | ||||
|  | ||||
|  | ||||
| class model(dict): | ||||
|     base_config = { | ||||
|             'note': "Hello! If you change this file manually, please be sure to restart your changedetection.io instance!", | ||||
| @@ -48,11 +53,13 @@ class model(dict): | ||||
|                     'password': False, | ||||
|                     'render_anchor_tag_content': False, | ||||
|                     'rss_access_token': None, | ||||
|                     'rss_content_format': RSS_FORMAT_TYPES[0][0], | ||||
|                     'rss_hide_muted_watches': True, | ||||
|                     'schema_version' : 0, | ||||
|                     'shared_diff_access': False, | ||||
|                     'webdriver_delay': None , # Extra delay in seconds before extracting text | ||||
|                     'tags': {} #@todo use Tag.model initialisers | ||||
|                     'tags': {}, #@todo use Tag.model initialisers | ||||
|                     'timezone': None, # Default IANA timezone name | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
| @@ -68,7 +75,7 @@ def parse_headers_from_text_file(filepath): | ||||
|         for l in f.readlines(): | ||||
|             l = l.strip() | ||||
|             if not l.startswith('#') and ':' in l: | ||||
|                 (k, v) = l.split(':') | ||||
|                 (k, v) = l.split(':', 1)  # Split only on the first colon | ||||
|                 headers[k.strip()] = v.strip() | ||||
|  | ||||
|     return headers | ||||
| @@ -6,6 +6,8 @@ import re | ||||
| from pathlib import Path | ||||
| from loguru import logger | ||||
|  | ||||
| from ..html_tools import TRANSLATE_WHITESPACE_TABLE | ||||
|  | ||||
| # Allowable protocols, protects against javascript: etc | ||||
| # file:// is further checked by ALLOW_FILE_URI | ||||
| SAFE_PROTOCOL_REGEX='^(http|https|ftp|file):' | ||||
| @@ -36,8 +38,9 @@ class model(watch_base): | ||||
|     jitter_seconds = 0 | ||||
|  | ||||
|     def __init__(self, *arg, **kw): | ||||
|         self.__datastore_path = kw['datastore_path'] | ||||
|         del kw['datastore_path'] | ||||
|         self.__datastore_path = kw.get('datastore_path') | ||||
|         if kw.get('datastore_path'): | ||||
|             del kw['datastore_path'] | ||||
|         super(model, self).__init__(*arg, **kw) | ||||
|         if kw.get('default'): | ||||
|             self.update(kw['default']) | ||||
| @@ -80,12 +83,16 @@ class model(watch_base): | ||||
|                     flash, Markup, url_for | ||||
|                 ) | ||||
|                 message = Markup('<a href="{}#general">The URL {} is invalid and cannot be used, click to edit</a>'.format( | ||||
|                     url_for('edit_page', uuid=self.get('uuid')), self.get('url', ''))) | ||||
|                     url_for('ui.ui_edit.edit_page', uuid=self.get('uuid')), self.get('url', ''))) | ||||
|                 flash(message, 'error') | ||||
|                 return '' | ||||
|  | ||||
|         if ready_url.startswith('source:'): | ||||
|             ready_url=ready_url.replace('source:', '') | ||||
|  | ||||
|         # Also double check it after any Jinja2 formatting just incase | ||||
|         if not is_safe_url(ready_url): | ||||
|             return 'DISABLED' | ||||
|         return ready_url | ||||
|  | ||||
|     def clear_watch(self): | ||||
| @@ -171,6 +178,10 @@ class model(watch_base): | ||||
|         """ | ||||
|         tmp_history = {} | ||||
|  | ||||
|         # In the case we are only using the watch for processing without history | ||||
|         if not self.watch_data_dir: | ||||
|             return [] | ||||
|  | ||||
|         # Read the history file as a dict | ||||
|         fname = os.path.join(self.watch_data_dir, "history.txt") | ||||
|         if os.path.isfile(fname): | ||||
| @@ -236,37 +247,32 @@ class model(watch_base): | ||||
|         bump = self.history | ||||
|         return self.__newest_history_key | ||||
|  | ||||
|     # Given an arbitrary timestamp, find the closest next key | ||||
|     # For example, last_viewed = 1000 so it should return the next 1001 timestamp | ||||
|     # | ||||
|     # used for the [diff] button so it can preset a smarter from_version | ||||
|     # Given an arbitrary timestamp, find the best history key for the [diff] button so it can preset a smarter from_version | ||||
|     @property | ||||
|     def get_next_snapshot_key_to_last_viewed(self): | ||||
|     def get_from_version_based_on_last_viewed(self): | ||||
|  | ||||
|         """Unfortunately for now timestamp is stored as string key""" | ||||
|         keys = list(self.history.keys()) | ||||
|         if not keys: | ||||
|             return None | ||||
|         if len(keys) == 1: | ||||
|             return keys[0] | ||||
|  | ||||
|         last_viewed = int(self.get('last_viewed')) | ||||
|         prev_k = keys[0] | ||||
|         sorted_keys = sorted(keys, key=lambda x: int(x)) | ||||
|         sorted_keys.reverse() | ||||
|  | ||||
|         # When the 'last viewed' timestamp is greater than the newest snapshot, return second last | ||||
|         if last_viewed > int(sorted_keys[0]): | ||||
|         # When the 'last viewed' timestamp is greater than or equal the newest snapshot, return second newest | ||||
|         if last_viewed >= int(sorted_keys[0]): | ||||
|             return sorted_keys[1] | ||||
|          | ||||
|         # When the 'last viewed' timestamp is between snapshots, return the older snapshot | ||||
|         for newer, older in list(zip(sorted_keys[0:], sorted_keys[1:])): | ||||
|             if last_viewed < int(newer) and last_viewed >= int(older): | ||||
|                 return older | ||||
|  | ||||
|         for k in sorted_keys: | ||||
|             if int(k) < last_viewed: | ||||
|                 if prev_k == sorted_keys[0]: | ||||
|                     # Return the second last one so we dont recommend the same version compares itself | ||||
|                     return sorted_keys[1] | ||||
|  | ||||
|                 return prev_k | ||||
|             prev_k = k | ||||
|  | ||||
|         return keys[0] | ||||
|         # When the 'last viewed' timestamp is less than the oldest snapshot, return oldest | ||||
|         return sorted_keys[-1] | ||||
|  | ||||
|     def get_history_snapshot(self, timestamp): | ||||
|         import brotli | ||||
| @@ -290,11 +296,11 @@ class model(watch_base): | ||||
|         with open(filepath, 'r', encoding='utf-8', errors='ignore') as f: | ||||
|             return f.read() | ||||
|  | ||||
|     # Save some text file to the appropriate path and bump the history | ||||
|    # Save some text file to the appropriate path and bump the history | ||||
|     # result_obj from fetch_site_status.run() | ||||
|     def save_history_text(self, contents, timestamp, snapshot_id): | ||||
|         import brotli | ||||
|  | ||||
|         import tempfile | ||||
|         logger.trace(f"{self.get('uuid')} - Updating history.txt with timestamp {timestamp}") | ||||
|  | ||||
|         self.ensure_data_dir_exists() | ||||
| @@ -302,33 +308,43 @@ class model(watch_base): | ||||
|         threshold = int(os.getenv('SNAPSHOT_BROTLI_COMPRESSION_THRESHOLD', 1024)) | ||||
|         skip_brotli = strtobool(os.getenv('DISABLE_BROTLI_TEXT_SNAPSHOT', 'False')) | ||||
|  | ||||
|         # Decide on snapshot filename and destination path | ||||
|         if not skip_brotli and len(contents) > threshold: | ||||
|             snapshot_fname = f"{snapshot_id}.txt.br" | ||||
|             dest = os.path.join(self.watch_data_dir, snapshot_fname) | ||||
|             if not os.path.exists(dest): | ||||
|                 with open(dest, 'wb') as f: | ||||
|                     f.write(brotli.compress(contents, mode=brotli.MODE_TEXT)) | ||||
|             encoded_data = brotli.compress(contents.encode('utf-8'), mode=brotli.MODE_TEXT) | ||||
|         else: | ||||
|             snapshot_fname = f"{snapshot_id}.txt" | ||||
|             dest = os.path.join(self.watch_data_dir, snapshot_fname) | ||||
|             if not os.path.exists(dest): | ||||
|                 with open(dest, 'wb') as f: | ||||
|                     f.write(contents) | ||||
|             encoded_data = contents.encode('utf-8') | ||||
|  | ||||
|         # Append to index | ||||
|         # @todo check last char was \n | ||||
|         dest = os.path.join(self.watch_data_dir, snapshot_fname) | ||||
|  | ||||
|         # Write snapshot file atomically if it doesn't exist | ||||
|         if not os.path.exists(dest): | ||||
|             with tempfile.NamedTemporaryFile('wb', delete=False, dir=self.watch_data_dir) as tmp: | ||||
|                 tmp.write(encoded_data) | ||||
|                 tmp.flush() | ||||
|                 os.fsync(tmp.fileno()) | ||||
|                 tmp_path = tmp.name | ||||
|             os.rename(tmp_path, dest) | ||||
|  | ||||
|         # Append to history.txt atomically | ||||
|         index_fname = os.path.join(self.watch_data_dir, "history.txt") | ||||
|         with open(index_fname, 'a') as f: | ||||
|             f.write("{},{}\n".format(timestamp, snapshot_fname)) | ||||
|             f.close() | ||||
|         index_line = f"{timestamp},{snapshot_fname}\n" | ||||
|  | ||||
|         # Lets try force flush here since it's usually a very small file | ||||
|         # If this still fails in the future then try reading all to memory first, re-writing etc | ||||
|         with open(index_fname, 'a', encoding='utf-8') as f: | ||||
|             f.write(index_line) | ||||
|             f.flush() | ||||
|             os.fsync(f.fileno()) | ||||
|  | ||||
|         # Update internal state | ||||
|         self.__newest_history_key = timestamp | ||||
|         self.__history_n += 1 | ||||
|  | ||||
|         # @todo bump static cache of the last timestamp so we dont need to examine the file to set a proper ''viewed'' status | ||||
|         return snapshot_fname | ||||
|  | ||||
|     @property | ||||
|     @property | ||||
|     def has_empty_checktime(self): | ||||
|         # using all() + dictionary comprehension | ||||
| @@ -345,14 +361,32 @@ class model(watch_base): | ||||
|         return seconds | ||||
|  | ||||
|     # Iterate over all history texts and see if something new exists | ||||
|     def lines_contain_something_unique_compared_to_history(self, lines: list): | ||||
|         local_lines = set([l.decode('utf-8').strip().lower() for l in lines]) | ||||
|     # Always applying .strip() to start/end but optionally replace any other whitespace | ||||
|     def lines_contain_something_unique_compared_to_history(self, lines: list, ignore_whitespace=False): | ||||
|         local_lines = set([]) | ||||
|         if lines: | ||||
|             if ignore_whitespace: | ||||
|                 if isinstance(lines[0], str): # Can be either str or bytes depending on what was on the disk | ||||
|                     local_lines = set([l.translate(TRANSLATE_WHITESPACE_TABLE).lower() for l in lines]) | ||||
|                 else: | ||||
|                     local_lines = set([l.decode('utf-8').translate(TRANSLATE_WHITESPACE_TABLE).lower() for l in lines]) | ||||
|             else: | ||||
|                 if isinstance(lines[0], str): # Can be either str or bytes depending on what was on the disk | ||||
|                     local_lines = set([l.strip().lower() for l in lines]) | ||||
|                 else: | ||||
|                     local_lines = set([l.decode('utf-8').strip().lower() for l in lines]) | ||||
|  | ||||
|  | ||||
|         # Compare each lines (set) against each history text file (set) looking for something new.. | ||||
|         existing_history = set({}) | ||||
|         for k, v in self.history.items(): | ||||
|             content = self.get_history_snapshot(k) | ||||
|             alist = set([line.strip().lower() for line in content.splitlines()]) | ||||
|  | ||||
|             if ignore_whitespace: | ||||
|                 alist = set([line.translate(TRANSLATE_WHITESPACE_TABLE).lower() for line in content.splitlines()]) | ||||
|             else: | ||||
|                 alist = set([line.strip().lower() for line in content.splitlines()]) | ||||
|  | ||||
|             existing_history = existing_history.union(alist) | ||||
|  | ||||
|         # Check that everything in local_lines(new stuff) already exists in existing_history - it should | ||||
| @@ -396,8 +430,8 @@ class model(watch_base): | ||||
|     @property | ||||
|     def watch_data_dir(self): | ||||
|         # The base dir of the watch data | ||||
|         return os.path.join(self.__datastore_path, self['uuid']) | ||||
|      | ||||
|         return os.path.join(self.__datastore_path, self['uuid']) if self.__datastore_path else None | ||||
|  | ||||
|     def get_error_text(self): | ||||
|         """Return the text saved from a previous request that resulted in a non-200 error""" | ||||
|         fname = os.path.join(self.watch_data_dir, "last-error.txt") | ||||
| @@ -504,21 +538,22 @@ class model(watch_base): | ||||
|     def save_error_text(self, contents): | ||||
|         self.ensure_data_dir_exists() | ||||
|         target_path = os.path.join(self.watch_data_dir, "last-error.txt") | ||||
|         with open(target_path, 'w') as f: | ||||
|         with open(target_path, 'w', encoding='utf-8') as f: | ||||
|             f.write(contents) | ||||
|  | ||||
|     def save_xpath_data(self, data, as_error=False): | ||||
|         import json | ||||
|         import zlib | ||||
|  | ||||
|         if as_error: | ||||
|             target_path = os.path.join(self.watch_data_dir, "elements-error.json") | ||||
|             target_path = os.path.join(str(self.watch_data_dir), "elements-error.deflate") | ||||
|         else: | ||||
|             target_path = os.path.join(self.watch_data_dir, "elements.json") | ||||
|             target_path = os.path.join(str(self.watch_data_dir), "elements.deflate") | ||||
|  | ||||
|         self.ensure_data_dir_exists() | ||||
|  | ||||
|         with open(target_path, 'w') as f: | ||||
|             f.write(json.dumps(data)) | ||||
|         with open(target_path, 'wb') as f: | ||||
|             f.write(zlib.compress(json.dumps(data).encode())) | ||||
|             f.close() | ||||
|  | ||||
|     # Save as PNG, PNG is larger but better for doing visual diff in the future | ||||
| @@ -540,7 +575,7 @@ class model(watch_base): | ||||
|         import brotli | ||||
|         filepath = os.path.join(self.watch_data_dir, 'last-fetched.br') | ||||
|  | ||||
|         if not os.path.isfile(filepath): | ||||
|         if not os.path.isfile(filepath) or os.path.getsize(filepath) == 0: | ||||
|             # If a previous attempt doesnt yet exist, just snarf the previous snapshot instead | ||||
|             dates = list(self.history.keys()) | ||||
|             if len(dates): | ||||
|   | ||||
| @@ -18,6 +18,7 @@ class watch_base(dict): | ||||
|             'check_count': 0, | ||||
|             'check_unique_lines': False,  # On change-detected, compare against all history if its something new | ||||
|             'consecutive_filter_failures': 0,  # Every time the CSS/xPath filter cannot be located, reset when all is fine. | ||||
|             'content-type': None, | ||||
|             'date_created': None, | ||||
|             'extract_text': [],  # Extract text by regex after filters | ||||
|             'extract_title_as_title': False, | ||||
| @@ -58,6 +59,65 @@ class watch_base(dict): | ||||
|             'text_should_not_be_present': [],  # Text that should not present | ||||
|             'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None}, | ||||
|             'time_between_check_use_default': True, | ||||
|             "time_schedule_limit": { | ||||
|                 "enabled": False, | ||||
|                 "monday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|                 "tuesday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|                 "wednesday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|                 "thursday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|                 "friday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|                 "saturday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|                 "sunday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|             }, | ||||
|             'title': None, | ||||
|             'track_ldjson_price_data': None, | ||||
|             'trim_text_whitespace': False, | ||||
|   | ||||
| @@ -4,6 +4,9 @@ from apprise import NotifyFormat | ||||
| import apprise | ||||
| from loguru import logger | ||||
|  | ||||
| from .apprise_plugin.assets import APPRISE_AVATAR_URL | ||||
| from .apprise_plugin.custom_handlers import apprise_http_custom_handler  # noqa: F401 | ||||
| from .safe_jinja import render as jinja_render | ||||
|  | ||||
| valid_tokens = { | ||||
|     'base_url': '', | ||||
| @@ -23,7 +26,7 @@ valid_tokens = { | ||||
| } | ||||
|  | ||||
| default_notification_format_for_watch = 'System default' | ||||
| default_notification_format = 'Text' | ||||
| default_notification_format = 'HTML Color' | ||||
| default_notification_body = '{{watch_url}} had a change.\n---\n{{diff}}\n---\n' | ||||
| default_notification_title = 'ChangeDetection.io Notification - {{watch_url}}' | ||||
|  | ||||
| @@ -31,6 +34,7 @@ valid_notification_formats = { | ||||
|     'Text': NotifyFormat.TEXT, | ||||
|     'Markdown': NotifyFormat.MARKDOWN, | ||||
|     'HTML': NotifyFormat.HTML, | ||||
|     'HTML Color': 'htmlcolor', | ||||
|     # Used only for editing a watch (not for global) | ||||
|     default_notification_format_for_watch: default_notification_format_for_watch | ||||
| } | ||||
| @@ -38,10 +42,6 @@ valid_notification_formats = { | ||||
|  | ||||
|  | ||||
| def process_notification(n_object, datastore): | ||||
|     # so that the custom endpoints are registered | ||||
|     from changedetectionio.apprise_plugin import apprise_custom_api_call_wrapper | ||||
|  | ||||
|     from .safe_jinja import render as jinja_render | ||||
|     now = time.time() | ||||
|     if n_object.get('notification_timestamp'): | ||||
|         logger.trace(f"Time since queued {now-n_object['notification_timestamp']:.3f}s") | ||||
| @@ -65,8 +65,12 @@ def process_notification(n_object, datastore): | ||||
|     # raise it as an exception | ||||
|  | ||||
|     sent_objs = [] | ||||
|     from .apprise_asset import asset | ||||
|     apobj = apprise.Apprise(debug=True, asset=asset) | ||||
|     from .apprise_plugin.assets import apprise_asset | ||||
|  | ||||
|     if 'as_async' in n_object: | ||||
|         apprise_asset.async_mode = n_object.get('as_async') | ||||
|  | ||||
|     apobj = apprise.Apprise(debug=True, asset=apprise_asset) | ||||
|  | ||||
|     if not n_object.get('notification_urls'): | ||||
|         return None | ||||
| @@ -76,9 +80,16 @@ def process_notification(n_object, datastore): | ||||
|  | ||||
|             # Get the notification body from datastore | ||||
|             n_body = jinja_render(template_str=n_object.get('notification_body', ''), **notification_parameters) | ||||
|             if n_object.get('notification_format', '').startswith('HTML'): | ||||
|                 n_body = n_body.replace("\n", '<br>') | ||||
|  | ||||
|             n_title = jinja_render(template_str=n_object.get('notification_title', ''), **notification_parameters) | ||||
|  | ||||
|             url = url.strip() | ||||
|             if url.startswith('#'): | ||||
|                 logger.trace(f"Skipping commented out notification URL - {url}") | ||||
|                 continue | ||||
|  | ||||
|             if not url: | ||||
|                 logger.warning(f"Process Notification: skipping empty notification URL.") | ||||
|                 continue | ||||
| @@ -100,7 +111,7 @@ def process_notification(n_object, datastore): | ||||
|                     and not url.startswith('get') \ | ||||
|                     and not url.startswith('delete') \ | ||||
|                     and not url.startswith('put'): | ||||
|                 url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png' | ||||
|                 url += k + f"avatar_url={APPRISE_AVATAR_URL}" | ||||
|  | ||||
|             if url.startswith('tgram://'): | ||||
|                 # Telegram only supports a limit subset of HTML, remove the '<br>' we place in. | ||||
| @@ -149,8 +160,6 @@ def process_notification(n_object, datastore): | ||||
|             attach=n_object.get('screenshot', None) | ||||
|         ) | ||||
|  | ||||
|         # Give apprise time to register an error | ||||
|         time.sleep(3) | ||||
|  | ||||
|         # Returns empty string if nothing found, multi-line string otherwise | ||||
|         log_value = logs.getvalue() | ||||
|   | ||||
| @@ -1,14 +1,14 @@ | ||||
| from abc import abstractmethod | ||||
| from changedetectionio.content_fetchers.base import Fetcher | ||||
| from changedetectionio.strtobool import strtobool | ||||
|  | ||||
| from copy import deepcopy | ||||
| from loguru import logger | ||||
| import hashlib | ||||
| import os | ||||
| import re | ||||
| import importlib | ||||
| import pkgutil | ||||
| import inspect | ||||
| import os | ||||
| import pkgutil | ||||
| import re | ||||
|  | ||||
| class difference_detection_processor(): | ||||
|  | ||||
| @@ -18,30 +18,33 @@ class difference_detection_processor(): | ||||
|     screenshot = None | ||||
|     watch = None | ||||
|     xpath_data = None | ||||
|     preferred_proxy = None | ||||
|  | ||||
|     def __init__(self, *args, datastore, watch_uuid, **kwargs): | ||||
|         super().__init__(*args, **kwargs) | ||||
|         self.datastore = datastore | ||||
|         self.watch = deepcopy(self.datastore.data['watching'].get(watch_uuid)) | ||||
|         # Generic fetcher that should be extended (requests, playwright etc) | ||||
|         self.fetcher = Fetcher() | ||||
|  | ||||
|     def call_browser(self, preferred_proxy_id=None): | ||||
|  | ||||
|     def call_browser(self): | ||||
|         from requests.structures import CaseInsensitiveDict | ||||
|         from changedetectionio.content_fetchers.exceptions import EmptyReply | ||||
|  | ||||
|         # Protect against file:// access | ||||
|         if re.search(r'^file://', self.watch.get('url', '').strip(), re.IGNORECASE): | ||||
|         url = self.watch.link | ||||
|  | ||||
|         # Protect against file:, file:/, file:// access, check the real "link" without any meta "source:" etc prepended. | ||||
|         if re.search(r'^file:', url.strip(), re.IGNORECASE): | ||||
|             if not strtobool(os.getenv('ALLOW_FILE_URI', 'false')): | ||||
|                 raise Exception( | ||||
|                     "file:// type access is denied for security reasons." | ||||
|                 ) | ||||
|  | ||||
|         url = self.watch.link | ||||
|  | ||||
|         # Requests, playwright, other browser via wss:// etc, fetch_extra_something | ||||
|         prefer_fetch_backend = self.watch.get('fetch_backend', 'system') | ||||
|  | ||||
|         # Proxy ID "key" | ||||
|         preferred_proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=self.watch.get('uuid')) | ||||
|         preferred_proxy_id = preferred_proxy_id if preferred_proxy_id else self.datastore.get_preferred_proxy_for_watch(uuid=self.watch.get('uuid')) | ||||
|  | ||||
|         # Pluggable content self.fetcher | ||||
|         if not prefer_fetch_backend or prefer_fetch_backend == 'system': | ||||
| @@ -99,6 +102,7 @@ class difference_detection_processor(): | ||||
|             self.fetcher.browser_steps_screenshot_path = os.path.join(self.datastore.datastore_path, self.watch.get('uuid')) | ||||
|  | ||||
|         # Tweak the base config with the per-watch ones | ||||
|         from changedetectionio.safe_jinja import render as jinja_render | ||||
|         request_headers = CaseInsensitiveDict() | ||||
|  | ||||
|         ua = self.datastore.data['settings']['requests'].get('default_ua') | ||||
| @@ -115,9 +119,15 @@ class difference_detection_processor(): | ||||
|         if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']: | ||||
|             request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '') | ||||
|  | ||||
|         for header_name in request_headers: | ||||
|             request_headers.update({header_name: jinja_render(template_str=request_headers.get(header_name))}) | ||||
|  | ||||
|         timeout = self.datastore.data['settings']['requests'].get('timeout') | ||||
|  | ||||
|         request_body = self.watch.get('body') | ||||
|         if request_body: | ||||
|             request_body = jinja_render(template_str=self.watch.get('body')) | ||||
|          | ||||
|         request_method = self.watch.get('method') | ||||
|         ignore_status_codes = self.watch.get('ignore_status_codes', False) | ||||
|  | ||||
| @@ -154,7 +164,7 @@ class difference_detection_processor(): | ||||
|         # After init, call run_changedetection() which will do the actual change-detection | ||||
|  | ||||
|     @abstractmethod | ||||
|     def run_changedetection(self, watch, skip_when_checksum_same=True): | ||||
|     def run_changedetection(self, watch): | ||||
|         update_obj = {'last_notification_error': False, 'last_error': False} | ||||
|         some_data = 'xxxxx' | ||||
|         update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest() | ||||
|   | ||||
| @@ -27,22 +27,27 @@ def _search_prop_by_value(matches, value): | ||||
|                 return prop[1]  # Yield the desired value and exit the function | ||||
|  | ||||
| def _deduplicate_prices(data): | ||||
|     seen = set() | ||||
|     unique_data = [] | ||||
|     import re | ||||
|  | ||||
|     ''' | ||||
|     Some price data has multiple entries, OR it has a single entry with ['$159', '159', 159, "$ 159"] or just "159" | ||||
|     Get all the values, clean it and add it to a set then return the unique values | ||||
|     ''' | ||||
|     unique_data = set() | ||||
|  | ||||
|     # Return the complete 'datum' where its price was not seen before | ||||
|     for datum in data: | ||||
|         # Convert 'value' to float if it can be a numeric string, otherwise leave it as is | ||||
|         try: | ||||
|             normalized_value = float(datum.value) if isinstance(datum.value, str) and datum.value.replace('.', '', 1).isdigit() else datum.value | ||||
|         except ValueError: | ||||
|             normalized_value = datum.value | ||||
|  | ||||
|         # If the normalized value hasn't been seen yet, add it to unique data | ||||
|         if normalized_value not in seen: | ||||
|             unique_data.append(datum) | ||||
|             seen.add(normalized_value) | ||||
|      | ||||
|     return unique_data | ||||
|         if isinstance(datum.value, list): | ||||
|             # Process each item in the list | ||||
|             normalized_value = set([float(re.sub(r'[^\d.]', '', str(item))) for item in datum.value if str(item).strip()]) | ||||
|             unique_data.update(normalized_value) | ||||
|         else: | ||||
|             # Process single value | ||||
|             v = float(re.sub(r'[^\d.]', '', str(datum.value))) | ||||
|             unique_data.add(v) | ||||
|  | ||||
|     return list(unique_data) | ||||
|  | ||||
|  | ||||
| # should return Restock() | ||||
| @@ -83,14 +88,13 @@ def get_itemprop_availability(html_content) -> Restock: | ||||
|         if price_result: | ||||
|             # Right now, we just support single product items, maybe we will store the whole actual metadata seperately in teh future and | ||||
|             # parse that for the UI? | ||||
|             prices_found = set(str(item.value).replace('$', '') for item in price_result) | ||||
|             if len(price_result) > 1 and len(prices_found) > 1: | ||||
|             if len(price_result) > 1 and len(price_result) > 1: | ||||
|                 # See of all prices are different, in the case that one product has many embedded data types with the same price | ||||
|                 # One might have $121.95 and another 121.95 etc | ||||
|                 logger.warning(f"More than one price found {prices_found}, throwing exception, cant use this plugin.") | ||||
|                 logger.warning(f"More than one price found {price_result}, throwing exception, cant use this plugin.") | ||||
|                 raise MoreThanOnePriceFound() | ||||
|  | ||||
|             value['price'] = price_result[0].value | ||||
|             value['price'] = price_result[0] | ||||
|  | ||||
|         pricecurrency_result = pricecurrency_parse.find(data) | ||||
|         if pricecurrency_result: | ||||
| @@ -140,11 +144,9 @@ class perform_site_check(difference_detection_processor): | ||||
|     screenshot = None | ||||
|     xpath_data = None | ||||
|  | ||||
|     def run_changedetection(self, watch, skip_when_checksum_same=True): | ||||
|     def run_changedetection(self, watch): | ||||
|         import hashlib | ||||
|  | ||||
|         from concurrent.futures import ProcessPoolExecutor | ||||
|         from functools import partial | ||||
|         if not watch: | ||||
|             raise Exception("Watch no longer exists.") | ||||
|  | ||||
| @@ -186,11 +188,7 @@ class perform_site_check(difference_detection_processor): | ||||
|  | ||||
|         itemprop_availability = {} | ||||
|         try: | ||||
|             with ProcessPoolExecutor() as executor: | ||||
|                 # Use functools.partial to create a callable with arguments | ||||
|                 # anything using bs4/lxml etc is quite "leaky" | ||||
|                 future = executor.submit(partial(get_itemprop_availability, self.fetcher.content)) | ||||
|                 itemprop_availability = future.result() | ||||
|             itemprop_availability = get_itemprop_availability(self.fetcher.content) | ||||
|         except MoreThanOnePriceFound as e: | ||||
|             # Add the real data | ||||
|             raise ProcessorException(message="Cannot run, more than one price detected, this plugin is only for product pages with ONE product, try the content-change detection mode.", | ||||
| @@ -226,7 +224,7 @@ class perform_site_check(difference_detection_processor): | ||||
|             itemprop_availability['original_price'] = itemprop_availability.get('price') | ||||
|             update_obj['restock']["original_price"] = itemprop_availability.get('price') | ||||
|  | ||||
|         if not self.fetcher.instock_data and not itemprop_availability.get('availability'): | ||||
|         if not self.fetcher.instock_data and not itemprop_availability.get('availability') and not itemprop_availability.get('price'): | ||||
|             raise ProcessorException( | ||||
|                 message=f"Unable to extract restock data for this page unfortunately. (Got code {self.fetcher.get_last_status_code()} from server), no embedded stock information was found and nothing interesting in the text, try using this watch with Chrome.", | ||||
|                 url=watch.get('url'), | ||||
| @@ -235,12 +233,21 @@ class perform_site_check(difference_detection_processor): | ||||
|                 xpath_data=self.fetcher.xpath_data | ||||
|                 ) | ||||
|  | ||||
|         logger.debug(f"self.fetcher.instock_data is - '{self.fetcher.instock_data}' and itemprop_availability.get('availability') is {itemprop_availability.get('availability')}") | ||||
|         # Nothing automatic in microdata found, revert to scraping the page | ||||
|         if self.fetcher.instock_data and itemprop_availability.get('availability') is None: | ||||
|             # 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold. | ||||
|             # Careful! this does not really come from chrome/js when the watch is set to plaintext | ||||
|             update_obj['restock']["in_stock"] = True if self.fetcher.instock_data == 'Possibly in stock' else False | ||||
|             logger.debug(f"Watch UUID {watch.get('uuid')} restock check returned '{self.fetcher.instock_data}' from JS scraper.") | ||||
|             logger.debug(f"Watch UUID {watch.get('uuid')} restock check returned instock_data - '{self.fetcher.instock_data}' from JS scraper.") | ||||
|  | ||||
|         # Very often websites will lie about the 'availability' in the metadata, so if the scraped version says its NOT in stock, use that. | ||||
|         if self.fetcher.instock_data and self.fetcher.instock_data != 'Possibly in stock': | ||||
|             if update_obj['restock'].get('in_stock'): | ||||
|                 logger.warning( | ||||
|                     f"Lie detected in the availability machine data!! when scraping said its not in stock!! itemprop was '{itemprop_availability}' and scraped from browser was '{self.fetcher.instock_data}' update obj was {update_obj['restock']} ") | ||||
|                 logger.warning(f"Setting instock to FALSE, scraper found '{self.fetcher.instock_data}' in the body but metadata reported not-in-stock") | ||||
|                 update_obj['restock']["in_stock"] = False | ||||
|  | ||||
|         # What we store in the snapshot | ||||
|         price = update_obj.get('restock').get('price') if update_obj.get('restock').get('price') else "" | ||||
| @@ -304,4 +311,4 @@ class perform_site_check(difference_detection_processor): | ||||
|         # Always record the new checksum | ||||
|         update_obj["previous_md5"] = fetched_md5 | ||||
|  | ||||
|         return changed_detected, update_obj, snapshot_content.encode('utf-8').strip() | ||||
|         return changed_detected, update_obj, snapshot_content.strip() | ||||
|   | ||||
| @@ -0,0 +1,114 @@ | ||||
|  | ||||
| from loguru import logger | ||||
|  | ||||
|  | ||||
|  | ||||
| def _task(watch, update_handler): | ||||
|     from changedetectionio.content_fetchers.exceptions import ReplyWithContentButNoText | ||||
|     from changedetectionio.processors.text_json_diff.processor import FilterNotFoundInResponse | ||||
|  | ||||
|     text_after_filter = '' | ||||
|  | ||||
|     try: | ||||
|         # The slow process (we run 2 of these in parallel) | ||||
|         changed_detected, update_obj, text_after_filter = update_handler.run_changedetection(watch=watch) | ||||
|     except FilterNotFoundInResponse as e: | ||||
|         text_after_filter = f"Filter not found in HTML: {str(e)}" | ||||
|     except ReplyWithContentButNoText as e: | ||||
|         text_after_filter = f"Filter found but no text (empty result)" | ||||
|     except Exception as e: | ||||
|         text_after_filter = f"Error: {str(e)}" | ||||
|  | ||||
|     if not text_after_filter.strip(): | ||||
|         text_after_filter = 'Empty content' | ||||
|  | ||||
|     # because run_changedetection always returns bytes due to saving the snapshots etc | ||||
|     text_after_filter = text_after_filter.decode('utf-8') if isinstance(text_after_filter, bytes) else text_after_filter | ||||
|  | ||||
|     return text_after_filter | ||||
|  | ||||
|  | ||||
| def prepare_filter_prevew(datastore, watch_uuid, form_data): | ||||
|     '''Used by @app.route("/edit/<string:uuid>/preview-rendered", methods=['POST'])''' | ||||
|     from changedetectionio import forms, html_tools | ||||
|     from changedetectionio.model.Watch import model as watch_model | ||||
|     from concurrent.futures import ProcessPoolExecutor | ||||
|     from copy import deepcopy | ||||
|     from flask import request | ||||
|     import brotli | ||||
|     import importlib | ||||
|     import os | ||||
|     import time | ||||
|     now = time.time() | ||||
|  | ||||
|     text_after_filter = '' | ||||
|     text_before_filter = '' | ||||
|     trigger_line_numbers = [] | ||||
|     ignore_line_numbers = [] | ||||
|  | ||||
|     tmp_watch = deepcopy(datastore.data['watching'].get(watch_uuid)) | ||||
|  | ||||
|     if tmp_watch and tmp_watch.history and os.path.isdir(tmp_watch.watch_data_dir): | ||||
|         # Splice in the temporary stuff from the form | ||||
|         form = forms.processor_text_json_diff_form(formdata=form_data if request.method == 'POST' else None, | ||||
|                                                    data=form_data | ||||
|                                                    ) | ||||
|  | ||||
|         # Only update vars that came in via the AJAX post | ||||
|         p = {k: v for k, v in form.data.items() if k in form_data.keys()} | ||||
|         tmp_watch.update(p) | ||||
|         blank_watch_no_filters = watch_model() | ||||
|         blank_watch_no_filters['url'] = tmp_watch.get('url') | ||||
|  | ||||
|         latest_filename = next(reversed(tmp_watch.history)) | ||||
|         html_fname = os.path.join(tmp_watch.watch_data_dir, f"{latest_filename}.html.br") | ||||
|         with open(html_fname, 'rb') as f: | ||||
|             decompressed_data = brotli.decompress(f.read()).decode('utf-8') if html_fname.endswith('.br') else f.read().decode('utf-8') | ||||
|  | ||||
|             # Just like a normal change detection except provide a fake "watch" object and dont call .call_browser() | ||||
|             processor_module = importlib.import_module("changedetectionio.processors.text_json_diff.processor") | ||||
|             update_handler = processor_module.perform_site_check(datastore=datastore, | ||||
|                                                                  watch_uuid=tmp_watch.get('uuid')  # probably not needed anymore anyway? | ||||
|                                                                  ) | ||||
|             # Use the last loaded HTML as the input | ||||
|             update_handler.datastore = datastore | ||||
|             update_handler.fetcher.content = str(decompressed_data) # str() because playwright/puppeteer/requests return string | ||||
|             update_handler.fetcher.headers['content-type'] = tmp_watch.get('content-type') | ||||
|  | ||||
|             # Process our watch with filters and the HTML from disk, and also a blank watch with no filters but also with the same HTML from disk | ||||
|             # Do this as a parallel process because it could take some time | ||||
|             with ProcessPoolExecutor(max_workers=2) as executor: | ||||
|                 future1 = executor.submit(_task, tmp_watch, update_handler) | ||||
|                 future2 = executor.submit(_task, blank_watch_no_filters, update_handler) | ||||
|  | ||||
|                 text_after_filter = future1.result() | ||||
|                 text_before_filter = future2.result() | ||||
|  | ||||
|     try: | ||||
|         trigger_line_numbers = html_tools.strip_ignore_text(content=text_after_filter, | ||||
|                                                             wordlist=tmp_watch['trigger_text'], | ||||
|                                                             mode='line numbers' | ||||
|                                                             ) | ||||
|     except Exception as e: | ||||
|         text_before_filter = f"Error: {str(e)}" | ||||
|  | ||||
|     try: | ||||
|         text_to_ignore = tmp_watch.get('ignore_text', []) + datastore.data['settings']['application'].get('global_ignore_text', []) | ||||
|         ignore_line_numbers = html_tools.strip_ignore_text(content=text_after_filter, | ||||
|                                                            wordlist=text_to_ignore, | ||||
|                                                            mode='line numbers' | ||||
|                                                            ) | ||||
|     except Exception as e: | ||||
|         text_before_filter = f"Error: {str(e)}" | ||||
|  | ||||
|     logger.trace(f"Parsed in {time.time() - now:.3f}s") | ||||
|  | ||||
|     return ({ | ||||
|             'after_filter': text_after_filter, | ||||
|             'before_filter': text_before_filter.decode('utf-8') if isinstance(text_before_filter, bytes) else text_before_filter, | ||||
|             'duration': time.time() - now, | ||||
|             'trigger_line_numbers': trigger_line_numbers, | ||||
|             'ignore_line_numbers': ignore_line_numbers, | ||||
|         }) | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -6,8 +6,9 @@ import os | ||||
| import re | ||||
| import urllib3 | ||||
|  | ||||
| from changedetectionio.conditions import execute_ruleset_against_all_plugins | ||||
| from changedetectionio.processors import difference_detection_processor | ||||
| from changedetectionio.html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text | ||||
| from changedetectionio.html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text, TRANSLATE_WHITESPACE_TABLE | ||||
| from changedetectionio import html_tools, content_fetchers | ||||
| from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT | ||||
| from loguru import logger | ||||
| @@ -35,10 +36,7 @@ class PDFToHTMLToolNotFound(ValueError): | ||||
| # (set_proxy_from_list) | ||||
| class perform_site_check(difference_detection_processor): | ||||
|  | ||||
|     def run_changedetection(self, watch, skip_when_checksum_same=True): | ||||
|         from concurrent.futures import ProcessPoolExecutor | ||||
|         from functools import partial | ||||
|  | ||||
|     def run_changedetection(self, watch): | ||||
|         changed_detected = False | ||||
|         html_content = "" | ||||
|         screenshot = False  # as bytes | ||||
| @@ -61,9 +59,6 @@ class perform_site_check(difference_detection_processor): | ||||
|         # Watches added automatically in the queue manager will skip if its the same checksum as the previous run | ||||
|         # Saves a lot of CPU | ||||
|         update_obj['previous_md5_before_filters'] = hashlib.md5(self.fetcher.content.encode('utf-8')).hexdigest() | ||||
|         if skip_when_checksum_same: | ||||
|             if update_obj['previous_md5_before_filters'] == watch.get('previous_md5_before_filters'): | ||||
|                 raise content_fetchers.exceptions.checksumFromPreviousCheckWasTheSame() | ||||
|  | ||||
|         # Fetching complete, now filters | ||||
|  | ||||
| @@ -174,30 +169,20 @@ class perform_site_check(difference_detection_processor): | ||||
|                     for filter_rule in include_filters_rule: | ||||
|                         # For HTML/XML we offer xpath as an option, just start a regular xPath "/.." | ||||
|                         if filter_rule[0] == '/' or filter_rule.startswith('xpath:'): | ||||
|                             with ProcessPoolExecutor() as executor: | ||||
|                                 # Use functools.partial to create a callable with arguments - anything using bs4/lxml etc is quite "leaky" | ||||
|                                 future = executor.submit(partial(html_tools.xpath_filter, xpath_filter=filter_rule.replace('xpath:', ''), | ||||
|                             html_content += html_tools.xpath_filter(xpath_filter=filter_rule.replace('xpath:', ''), | ||||
|                                                                     html_content=self.fetcher.content, | ||||
|                                                                     append_pretty_line_formatting=not watch.is_source_type_url, | ||||
|                                                                     is_rss=is_rss)) | ||||
|                                 html_content += future.result() | ||||
|                                                                     is_rss=is_rss) | ||||
|  | ||||
|                         elif filter_rule.startswith('xpath1:'): | ||||
|                             with ProcessPoolExecutor() as executor: | ||||
|                                 # Use functools.partial to create a callable with arguments - anything using bs4/lxml etc is quite "leaky" | ||||
|                                 future = executor.submit(partial(html_tools.xpath1_filter, xpath_filter=filter_rule.replace('xpath1:', ''), | ||||
|                                                                     html_content=self.fetcher.content, | ||||
|                                                                     append_pretty_line_formatting=not watch.is_source_type_url, | ||||
|                                                                     is_rss=is_rss)) | ||||
|                                 html_content += future.result() | ||||
|                             html_content += html_tools.xpath1_filter(xpath_filter=filter_rule.replace('xpath1:', ''), | ||||
|                                                                      html_content=self.fetcher.content, | ||||
|                                                                      append_pretty_line_formatting=not watch.is_source_type_url, | ||||
|                                                                      is_rss=is_rss) | ||||
|                         else: | ||||
|                             with ProcessPoolExecutor() as executor: | ||||
|                                 # Use functools.partial to create a callable with arguments - anything using bs4/lxml etc is quite "leaky" | ||||
|                                 # CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text | ||||
|                                 future = executor.submit(partial(html_tools.include_filters, include_filters=filter_rule, | ||||
|                             html_content += html_tools.include_filters(include_filters=filter_rule, | ||||
|                                                                        html_content=self.fetcher.content, | ||||
|                                                                        append_pretty_line_formatting=not watch.is_source_type_url)) | ||||
|                                 html_content += future.result() | ||||
|                                                                        append_pretty_line_formatting=not watch.is_source_type_url) | ||||
|  | ||||
|                     if not html_content.strip(): | ||||
|                         raise FilterNotFoundInResponse(msg=include_filters_rule, screenshot=self.fetcher.screenshot, xpath_data=self.fetcher.xpath_data) | ||||
| @@ -210,34 +195,21 @@ class perform_site_check(difference_detection_processor): | ||||
|                 else: | ||||
|                     # extract text | ||||
|                     do_anchor = self.datastore.data["settings"]["application"].get("render_anchor_tag_content", False) | ||||
|                     with ProcessPoolExecutor() as executor: | ||||
|                         # Use functools.partial to create a callable with arguments - anything using bs4/lxml etc is quite "leaky" | ||||
|                         # CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text | ||||
|                         future = executor.submit(partial(html_tools.html_to_text, html_content=html_content, | ||||
|                             render_anchor_tag_content=do_anchor, | ||||
|                             is_rss=is_rss)) #1874 activate the <title workaround hack | ||||
|                         stripped_text_from_html = future.result() | ||||
|  | ||||
|                     stripped_text_from_html = html_tools.html_to_text(html_content=html_content, | ||||
|                                                                       render_anchor_tag_content=do_anchor, | ||||
|                                                                       is_rss=is_rss)  # 1874 activate the <title workaround hack | ||||
|  | ||||
|         if watch.get('trim_text_whitespace'): | ||||
|             stripped_text_from_html = '\n'.join(line.strip() for line in stripped_text_from_html.replace("\n\n", "\n").splitlines()) | ||||
|  | ||||
|         if watch.get('remove_duplicate_lines'): | ||||
|             stripped_text_from_html = '\n'.join(dict.fromkeys(line.strip() for line in stripped_text_from_html.replace("\n\n", "\n").splitlines())) | ||||
|  | ||||
|         if watch.get('sort_text_alphabetically'): | ||||
|             # Note: Because a <p>something</p> will add an extra line feed to signify the paragraph gap | ||||
|             # we end up with 'Some text\n\n', sorting will add all those extra \n at the start, so we remove them here. | ||||
|             stripped_text_from_html = stripped_text_from_html.replace("\n\n", "\n") | ||||
|             stripped_text_from_html = '\n'.join(sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower())) | ||||
|  | ||||
|  | ||||
|         # Re #340 - return the content before the 'ignore text' was applied | ||||
|         text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8') | ||||
|         # Also used to calculate/show what was removed | ||||
|         text_content_before_ignored_filter = stripped_text_from_html | ||||
|  | ||||
|         # @todo whitespace coming from missing rtrim()? | ||||
|         # stripped_text_from_html could be based on their preferences, replace the processed text with only that which they want to know about. | ||||
|         # Rewrite's the processing text based on only what diff result they want to see | ||||
|  | ||||
|         if watch.has_special_diff_filter_options_set() and len(watch.history.keys()): | ||||
|             # Now the content comes from the diff-parser and not the returned HTTP traffic, so could be some differences | ||||
|             from changedetectionio import diff | ||||
| @@ -252,12 +224,12 @@ class perform_site_check(difference_detection_processor): | ||||
|                                              line_feed_sep="\n", | ||||
|                                              include_change_type_prefix=False) | ||||
|  | ||||
|             watch.save_last_text_fetched_before_filters(text_content_before_ignored_filter) | ||||
|             watch.save_last_text_fetched_before_filters(text_content_before_ignored_filter.encode('utf-8')) | ||||
|  | ||||
|             if not rendered_diff and stripped_text_from_html: | ||||
|                 # We had some content, but no differences were found | ||||
|                 # Store our new file as the MD5 so it will trigger in the future | ||||
|                 c = hashlib.md5(text_content_before_ignored_filter.translate(None, b'\r\n\t ')).hexdigest() | ||||
|                 c = hashlib.md5(stripped_text_from_html.translate(TRANSLATE_WHITESPACE_TABLE).encode('utf-8')).hexdigest() | ||||
|                 return False, {'previous_md5': c}, stripped_text_from_html.encode('utf-8') | ||||
|             else: | ||||
|                 stripped_text_from_html = rendered_diff | ||||
| @@ -278,14 +250,6 @@ class perform_site_check(difference_detection_processor): | ||||
|  | ||||
|         update_obj["last_check_status"] = self.fetcher.get_last_status_code() | ||||
|  | ||||
|         # If there's text to skip | ||||
|         # @todo we could abstract out the get_text() to handle this cleaner | ||||
|         text_to_ignore = watch.get('ignore_text', []) + self.datastore.data['settings']['application'].get('global_ignore_text', []) | ||||
|         if len(text_to_ignore): | ||||
|             stripped_text_from_html = html_tools.strip_ignore_text(stripped_text_from_html, text_to_ignore) | ||||
|         else: | ||||
|             stripped_text_from_html = stripped_text_from_html.encode('utf8') | ||||
|  | ||||
|         # 615 Extract text by regex | ||||
|         extract_text = watch.get('extract_text', []) | ||||
|         if len(extract_text) > 0: | ||||
| @@ -294,39 +258,53 @@ class perform_site_check(difference_detection_processor): | ||||
|                 # incase they specified something in '/.../x' | ||||
|                 if re.search(PERL_STYLE_REGEX, s_re, re.IGNORECASE): | ||||
|                     regex = html_tools.perl_style_slash_enclosed_regex_to_options(s_re) | ||||
|                     result = re.findall(regex.encode('utf-8'), stripped_text_from_html) | ||||
|                     result = re.findall(regex, stripped_text_from_html) | ||||
|  | ||||
|                     for l in result: | ||||
|                         if type(l) is tuple: | ||||
|                             # @todo - some formatter option default (between groups) | ||||
|                             regex_matched_output += list(l) + [b'\n'] | ||||
|                             regex_matched_output += list(l) + ['\n'] | ||||
|                         else: | ||||
|                             # @todo - some formatter option default (between each ungrouped result) | ||||
|                             regex_matched_output += [l] + [b'\n'] | ||||
|                             regex_matched_output += [l] + ['\n'] | ||||
|                 else: | ||||
|                     # Doesnt look like regex, just hunt for plaintext and return that which matches | ||||
|                     # `stripped_text_from_html` will be bytes, so we must encode s_re also to bytes | ||||
|                     r = re.compile(re.escape(s_re.encode('utf-8')), re.IGNORECASE) | ||||
|                     r = re.compile(re.escape(s_re), re.IGNORECASE) | ||||
|                     res = r.findall(stripped_text_from_html) | ||||
|                     if res: | ||||
|                         for match in res: | ||||
|                             regex_matched_output += [match] + [b'\n'] | ||||
|                             regex_matched_output += [match] + ['\n'] | ||||
|  | ||||
|             ########################################################## | ||||
|             stripped_text_from_html = b'' | ||||
|             text_content_before_ignored_filter = b'' | ||||
|             stripped_text_from_html = '' | ||||
|  | ||||
|             if regex_matched_output: | ||||
|                 # @todo some formatter for presentation? | ||||
|                 stripped_text_from_html = b''.join(regex_matched_output) | ||||
|                 text_content_before_ignored_filter = stripped_text_from_html | ||||
|                 stripped_text_from_html = ''.join(regex_matched_output) | ||||
|  | ||||
|         if watch.get('remove_duplicate_lines'): | ||||
|             stripped_text_from_html = '\n'.join(dict.fromkeys(line for line in stripped_text_from_html.replace("\n\n", "\n").splitlines())) | ||||
|  | ||||
|  | ||||
|         if watch.get('sort_text_alphabetically'): | ||||
|             # Note: Because a <p>something</p> will add an extra line feed to signify the paragraph gap | ||||
|             # we end up with 'Some text\n\n', sorting will add all those extra \n at the start, so we remove them here. | ||||
|             stripped_text_from_html = stripped_text_from_html.replace("\n\n", "\n") | ||||
|             stripped_text_from_html = '\n'.join(sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower())) | ||||
|  | ||||
| ### CALCULATE MD5 | ||||
|         # If there's text to ignore | ||||
|         text_to_ignore = watch.get('ignore_text', []) + self.datastore.data['settings']['application'].get('global_ignore_text', []) | ||||
|         text_for_checksuming = stripped_text_from_html | ||||
|         if text_to_ignore: | ||||
|             text_for_checksuming = html_tools.strip_ignore_text(stripped_text_from_html, text_to_ignore) | ||||
|  | ||||
|         # Re #133 - if we should strip whitespaces from triggering the change detected comparison | ||||
|         if self.datastore.data['settings']['application'].get('ignore_whitespace', False): | ||||
|             fetched_md5 = hashlib.md5(stripped_text_from_html.translate(None, b'\r\n\t ')).hexdigest() | ||||
|         if text_for_checksuming and self.datastore.data['settings']['application'].get('ignore_whitespace', False): | ||||
|             fetched_md5 = hashlib.md5(text_for_checksuming.translate(TRANSLATE_WHITESPACE_TABLE).encode('utf-8')).hexdigest() | ||||
|         else: | ||||
|             fetched_md5 = hashlib.md5(stripped_text_from_html).hexdigest() | ||||
|             fetched_md5 = hashlib.md5(text_for_checksuming.encode('utf-8')).hexdigest() | ||||
|  | ||||
|         ############ Blocking rules, after checksum ################# | ||||
|         blocked = False | ||||
| @@ -354,19 +332,45 @@ class perform_site_check(difference_detection_processor): | ||||
|             if result: | ||||
|                 blocked = True | ||||
|  | ||||
|         # The main thing that all this at the moment comes down to :) | ||||
|         if watch.get('previous_md5') != fetched_md5: | ||||
|             changed_detected = True | ||||
|         # And check if 'conditions' will let this pass through | ||||
|         if watch.get('conditions') and watch.get('conditions_match_logic'): | ||||
|             conditions_result = execute_ruleset_against_all_plugins(current_watch_uuid=watch.get('uuid'), | ||||
|                                                                     application_datastruct=self.datastore.data, | ||||
|                                                                     ephemeral_data={ | ||||
|                                                                         'text': stripped_text_from_html | ||||
|                                                                     } | ||||
|                                                                     ) | ||||
|  | ||||
|             if not conditions_result.get('result'): | ||||
|                 # Conditions say "Condition not met" so we block it. | ||||
|                 blocked = True | ||||
|  | ||||
|         # Looks like something changed, but did it match all the rules? | ||||
|         if blocked: | ||||
|             changed_detected = False | ||||
|         else: | ||||
|             # The main thing that all this at the moment comes down to :) | ||||
|             if watch.get('previous_md5') != fetched_md5: | ||||
|                 changed_detected = True | ||||
|  | ||||
|             # Always record the new checksum | ||||
|             update_obj["previous_md5"] = fetched_md5 | ||||
|  | ||||
|             # On the first run of a site, watch['previous_md5'] will be None, set it the current one. | ||||
|             if not watch.get('previous_md5'): | ||||
|                 watch['previous_md5'] = fetched_md5 | ||||
|  | ||||
|         logger.debug(f"Watch UUID {watch.get('uuid')} content check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}") | ||||
|  | ||||
|         if changed_detected: | ||||
|             if watch.get('check_unique_lines', False): | ||||
|                 has_unique_lines = watch.lines_contain_something_unique_compared_to_history(lines=stripped_text_from_html.splitlines()) | ||||
|                 ignore_whitespace = self.datastore.data['settings']['application'].get('ignore_whitespace') | ||||
|  | ||||
|                 has_unique_lines = watch.lines_contain_something_unique_compared_to_history( | ||||
|                     lines=stripped_text_from_html.splitlines(), | ||||
|                     ignore_whitespace=ignore_whitespace | ||||
|                 ) | ||||
|  | ||||
|                 # One or more lines? unsure? | ||||
|                 if not has_unique_lines: | ||||
|                     logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} didnt have anything new setting change_detected=False") | ||||
| @@ -374,11 +378,6 @@ class perform_site_check(difference_detection_processor): | ||||
|                 else: | ||||
|                     logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} had unique content") | ||||
|  | ||||
|         # Always record the new checksum | ||||
|         update_obj["previous_md5"] = fetched_md5 | ||||
|  | ||||
|         # On the first run of a site, watch['previous_md5'] will be None, set it the current one. | ||||
|         if not watch.get('previous_md5'): | ||||
|             watch['previous_md5'] = fetched_md5 | ||||
|  | ||||
|         return changed_detected, update_obj, text_content_before_ignored_filter | ||||
|         # stripped_text_from_html - Everything after filters and NO 'ignored' content | ||||
|         return changed_detected, update_obj, stripped_text_from_html | ||||
|   | ||||
| @@ -16,25 +16,31 @@ echo "---------------------------------- SOCKS5 -------------------" | ||||
| docker run --network changedet-network \ | ||||
|   -v `pwd`/tests/proxy_socks5/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json \ | ||||
|   --rm \ | ||||
|   -e "FLASK_SERVER_NAME=cdio" \ | ||||
|   --hostname cdio \ | ||||
|   -e "SOCKSTEST=proxiesjson" \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py' | ||||
|   bash -c 'cd changedetectionio && pytest --live-server-host=0.0.0.0 --live-server-port=5004  -s tests/proxy_socks5/test_socks5_proxy_sources.py' | ||||
|  | ||||
| # SOCKS5 related - by manually entering in UI | ||||
| docker run --network changedet-network \ | ||||
|   --rm \ | ||||
|   -e "FLASK_SERVER_NAME=cdio" \ | ||||
|   --hostname cdio \ | ||||
|   -e "SOCKSTEST=manual" \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy.py' | ||||
|   bash -c 'cd changedetectionio && pytest --live-server-host=0.0.0.0 --live-server-port=5004  -s tests/proxy_socks5/test_socks5_proxy.py' | ||||
|  | ||||
| # SOCKS5 related - test from proxies.json via playwright - NOTE- PLAYWRIGHT DOESNT SUPPORT AUTHENTICATING PROXY | ||||
| docker run --network changedet-network \ | ||||
|   -e "SOCKSTEST=manual-playwright" \ | ||||
|   --hostname cdio \ | ||||
|   -e "FLASK_SERVER_NAME=cdio" \ | ||||
|   -v `pwd`/tests/proxy_socks5/proxies.json-example-noauth:/app/changedetectionio/test-datastore/proxies.json \ | ||||
|   -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" \ | ||||
|   --rm \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py' | ||||
|   bash -c 'cd changedetectionio && pytest --live-server-host=0.0.0.0 --live-server-port=5004  -s tests/proxy_socks5/test_socks5_proxy_sources.py' | ||||
|  | ||||
| echo "socks5 server logs" | ||||
| docker logs socks5proxy | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| <?xml version="1.0" encoding="UTF-8" standalone="no"?> | ||||
| <svg | ||||
|    version="1.1" | ||||
|    id="Layer_1" | ||||
|    id="copy" | ||||
|    x="0px" | ||||
|    y="0px" | ||||
|    viewBox="0 0 115.77 122.88" | ||||
|   | ||||
| Before Width: | Height: | Size: 2.5 KiB After Width: | Height: | Size: 2.5 KiB | 
| @@ -6,7 +6,7 @@ | ||||
|    height="7.5005589" | ||||
|    width="11.248507" | ||||
|    version="1.1" | ||||
|    id="Layer_1" | ||||
|    id="email" | ||||
|    viewBox="0 0 7.1975545 4.7993639" | ||||
|    xml:space="preserve" | ||||
|    xmlns="http://www.w3.org/2000/svg" | ||||
|   | ||||
| Before Width: | Height: | Size: 1.9 KiB After Width: | Height: | Size: 1.9 KiB | 
| Before Width: | Height: | Size: 569 B After Width: | Height: | Size: 569 B | 
| Before Width: | Height: | Size: 14 KiB After Width: | Height: | Size: 14 KiB | 
| Before Width: | Height: | Size: 6.2 KiB After Width: | Height: | Size: 6.2 KiB | 
							
								
								
									
										225
									
								
								changedetectionio/static/images/schedule.svg
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,225 @@ | ||||
| <?xml version="1.0" encoding="UTF-8" standalone="no"?> | ||||
| <svg | ||||
|    version="1.1" | ||||
|    id="schedule" | ||||
|    x="0px" | ||||
|    y="0px" | ||||
|    viewBox="0 0 661.20001 665.40002" | ||||
|    xml:space="preserve" | ||||
|    width="661.20001" | ||||
|    height="665.40002" | ||||
|    inkscape:version="1.1.2 (0a00cf5339, 2022-02-04)" | ||||
|    sodipodi:docname="schedule.svg" | ||||
|    xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | ||||
|    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | ||||
|    xmlns="http://www.w3.org/2000/svg" | ||||
|    xmlns:svg="http://www.w3.org/2000/svg"><defs | ||||
|    id="defs77" /><sodipodi:namedview | ||||
|    id="namedview75" | ||||
|    pagecolor="#ffffff" | ||||
|    bordercolor="#666666" | ||||
|    borderopacity="1.0" | ||||
|    inkscape:pageshadow="2" | ||||
|    inkscape:pageopacity="0.0" | ||||
|    inkscape:pagecheckerboard="0" | ||||
|    showgrid="false" | ||||
|    fit-margin-top="0" | ||||
|    fit-margin-left="0" | ||||
|    fit-margin-right="0" | ||||
|    fit-margin-bottom="0" | ||||
|    inkscape:zoom="1.2458671" | ||||
|    inkscape:cx="300.59386" | ||||
|    inkscape:cy="332.29869" | ||||
|    inkscape:window-width="1920" | ||||
|    inkscape:window-height="1051" | ||||
|    inkscape:window-x="1920" | ||||
|    inkscape:window-y="0" | ||||
|    inkscape:window-maximized="1" | ||||
|    inkscape:current-layer="g72" /> <style | ||||
|    type="text/css" | ||||
|    id="style2"> .st0{fill:#FFFFFF;} .st1{fill:#C1272D;} .st2{fill:#991D26;} .st3{fill:#CCCCCC;} .st4{fill:#E6E6E6;} .st5{fill:#F7931E;} .st6{fill:#F2F2F2;} .st7{fill:none;stroke:#999999;stroke-width:17.9763;stroke-linecap:round;stroke-miterlimit:10;} .st8{fill:none;stroke:#333333;stroke-width:8.9882;stroke-linecap:round;stroke-miterlimit:10;} .st9{fill:none;stroke:#C1272D;stroke-width:5.9921;stroke-linecap:round;stroke-miterlimit:10;} .st10{fill:#245F7F;} </style> <g | ||||
|    id="g72" | ||||
|    transform="translate(-149.4,-147.3)"> <path | ||||
|    class="st0" | ||||
|    d="M 601.2,699.8 H 205 c -30.7,0 -55.6,-24.9 -55.6,-55.6 V 248 c 0,-30.7 24.9,-55.6 55.6,-55.6 h 396.2 c 30.7,0 55.6,24.9 55.6,55.6 v 396.2 c 0,30.7 -24.9,55.6 -55.6,55.6 z" | ||||
|    id="path4" | ||||
|    style="fill:#dfdfdf;fill-opacity:1" /> <path | ||||
|    class="st1" | ||||
|    d="M 601.2,192.4 H 205 c -30.7,0 -55.6,24.9 -55.6,55.6 v 88.5 H 656.8 V 248 c 0,-30.7 -24.9,-55.6 -55.6,-55.6 z" | ||||
|    id="path6" | ||||
|    style="fill:#d62128;fill-opacity:1" /> <circle | ||||
|    class="st2" | ||||
|    cx="253.3" | ||||
|    cy="264.5" | ||||
|    r="36.700001" | ||||
|    id="circle8" /> <circle | ||||
|    class="st2" | ||||
|    cx="551.59998" | ||||
|    cy="264.5" | ||||
|    r="36.700001" | ||||
|    id="circle10" /> <path | ||||
|    class="st3" | ||||
|    d="m 253.3,275.7 v 0 c -11.8,0 -21.3,-9.6 -21.3,-21.3 v -85.8 c 0,-11.8 9.6,-21.3 21.3,-21.3 v 0 c 11.8,0 21.3,9.6 21.3,21.3 v 85.8 c 0,11.8 -9.5,21.3 -21.3,21.3 z" | ||||
|    id="path12" /> <path | ||||
|    class="st3" | ||||
|    d="m 551.6,275.7 v 0 c -11.8,0 -21.3,-9.6 -21.3,-21.3 v -85.8 c 0,-11.8 9.6,-21.3 21.3,-21.3 v 0 c 11.8,0 21.3,9.6 21.3,21.3 v 85.8 c 0.1,11.8 -9.5,21.3 -21.3,21.3 z" | ||||
|    id="path14" /> <rect | ||||
|    x="215.7" | ||||
|    y="370.89999" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect16" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="313" | ||||
|    y="370.89999" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect18" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="410.20001" | ||||
|    y="370.89999" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect20" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="507.5" | ||||
|    y="370.89999" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect22" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="215.7" | ||||
|    y="465" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect24" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="313" | ||||
|    y="465" | ||||
|    class="st1" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect26" | ||||
|    style="fill:#27c12b;fill-opacity:1" /> <rect | ||||
|    x="410.20001" | ||||
|    y="465" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect28" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="507.5" | ||||
|    y="465" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect30" /> <rect | ||||
|    x="215.7" | ||||
|    y="559.09998" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect32" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="313" | ||||
|    y="559.09998" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect34" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="410.20001" | ||||
|    y="559.09998" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect36" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="507.5" | ||||
|    y="559.09998" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect38" /> <g | ||||
|    id="g70"> <circle | ||||
|    class="st5" | ||||
|    cx="621.90002" | ||||
|    cy="624" | ||||
|    r="188.7" | ||||
|    id="circle40" /> <circle | ||||
|    class="st0" | ||||
|    cx="621.90002" | ||||
|    cy="624" | ||||
|    r="148" | ||||
|    id="circle42" /> <path | ||||
|    class="st6" | ||||
|    d="m 486.6,636.8 c 0,-81.7 66.3,-148 148,-148 37.6,0 72,14.1 98.1,37.2 -27.1,-30.6 -66.7,-49.9 -110.8,-49.9 -81.7,0 -148,66.3 -148,148 0,44.1 19.3,83.7 49.9,110.8 -23.1,-26.2 -37.2,-60.5 -37.2,-98.1 z" | ||||
|    id="path44" /> <polyline | ||||
|    class="st7" | ||||
|    points="621.9,530.4 621.9,624 559,624  " | ||||
|    id="polyline46" /> <g | ||||
|    id="g64"> <line | ||||
|    class="st8" | ||||
|    x1="621.90002" | ||||
|    y1="508.29999" | ||||
|    x2="621.90002" | ||||
|    y2="497.10001" | ||||
|    id="line48" /> <line | ||||
|    class="st8" | ||||
|    x1="621.90002" | ||||
|    y1="756.29999" | ||||
|    x2="621.90002" | ||||
|    y2="745.09998" | ||||
|    id="line50" /> <line | ||||
|    class="st8" | ||||
|    x1="740.29999" | ||||
|    y1="626.70001" | ||||
|    x2="751.5" | ||||
|    y2="626.70001" | ||||
|    id="line52" /> <line | ||||
|    class="st8" | ||||
|    x1="492.29999" | ||||
|    y1="626.70001" | ||||
|    x2="503.5" | ||||
|    y2="626.70001" | ||||
|    id="line54" /> <line | ||||
|    class="st8" | ||||
|    x1="705.59998" | ||||
|    y1="710.40002" | ||||
|    x2="713.5" | ||||
|    y2="718.29999" | ||||
|    id="line56" /> <line | ||||
|    class="st8" | ||||
|    x1="530.29999" | ||||
|    y1="535.09998" | ||||
|    x2="538.20001" | ||||
|    y2="543" | ||||
|    id="line58" /> <line | ||||
|    class="st8" | ||||
|    x1="538.20001" | ||||
|    y1="710.40002" | ||||
|    x2="530.29999" | ||||
|    y2="718.29999" | ||||
|    id="line60" /> <line | ||||
|    class="st8" | ||||
|    x1="713.5" | ||||
|    y1="535.09998" | ||||
|    x2="705.59998" | ||||
|    y2="543" | ||||
|    id="line62" /> </g> <line | ||||
|    class="st9" | ||||
|    x1="604.40002" | ||||
|    y1="606.29999" | ||||
|    x2="684.5" | ||||
|    y2="687.40002" | ||||
|    id="line66" /> <circle | ||||
|    class="st10" | ||||
|    cx="621.90002" | ||||
|    cy="624" | ||||
|    r="16.1" | ||||
|    id="circle68" /> </g> </g> </svg> | ||||
| After Width: | Height: | Size: 5.9 KiB | 
| @@ -221,7 +221,7 @@ $(document).ready(function () { | ||||
|                     // If you switch to "Click X,y" after an element here is setup, it will give the last co-ords anyway | ||||
|                     //if (x['isClickable'] || x['tagName'].startsWith('h') || x['tagName'] === 'a' || x['tagName'] === 'button' || x['tagtype'] === 'submit' || x['tagtype'] === 'checkbox' || x['tagtype'] === 'radio' || x['tagtype'] === 'li') { | ||||
|                         $('select', first_available).val('Click element').change(); | ||||
|                         $('input[type=text]', first_available).first().val(x['xpath']); | ||||
|                         $('input[type=text]', first_available).first().val(x['xpath']).focus(); | ||||
|                         found_something = true; | ||||
|                     //} | ||||
|                 } | ||||
| @@ -305,7 +305,7 @@ $(document).ready(function () { | ||||
|  | ||||
|         if ($(this).val() === 'Click X,Y' && last_click_xy['x'] > 0 && $(elem_value).val().length === 0) { | ||||
|             // @todo handle scale | ||||
|             $(elem_value).val(last_click_xy['x'] + ',' + last_click_xy['y']); | ||||
|             $(elem_value).val(last_click_xy['x'] + ',' + last_click_xy['y']).focus(); | ||||
|         } | ||||
|     }).change(); | ||||
|  | ||||
|   | ||||
							
								
								
									
										154
									
								
								changedetectionio/static/js/conditions.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,154 @@ | ||||
| $(document).ready(function () { | ||||
|     // Function to set up button event handlers | ||||
|     function setupButtonHandlers() { | ||||
|         // Unbind existing handlers first to prevent duplicates | ||||
|         $(".addRuleRow, .removeRuleRow, .verifyRuleRow").off("click"); | ||||
|          | ||||
|         // Add row button handler | ||||
|         $(".addRuleRow").on("click", function(e) { | ||||
|             e.preventDefault(); | ||||
|              | ||||
|             let currentRow = $(this).closest(".fieldlist-row"); | ||||
|              | ||||
|             // Clone without events | ||||
|             let newRow = currentRow.clone(false); | ||||
|              | ||||
|             // Reset input values in the cloned row | ||||
|             newRow.find("input").val(""); | ||||
|             newRow.find("select").prop("selectedIndex", 0); | ||||
|              | ||||
|             // Insert the new row after the current one | ||||
|             currentRow.after(newRow); | ||||
|              | ||||
|             // Reindex all rows | ||||
|             reindexRules(); | ||||
|         }); | ||||
|          | ||||
|         // Remove row button handler | ||||
|         $(".removeRuleRow").on("click", function(e) { | ||||
|             e.preventDefault(); | ||||
|              | ||||
|             // Only remove if there's more than one row | ||||
|             if ($("#rulesTable .fieldlist-row").length > 1) { | ||||
|                 $(this).closest(".fieldlist-row").remove(); | ||||
|                 reindexRules(); | ||||
|             } | ||||
|         }); | ||||
|          | ||||
|         // Verify rule button handler | ||||
|         $(".verifyRuleRow").on("click", function(e) { | ||||
|             e.preventDefault(); | ||||
|              | ||||
|             let row = $(this).closest(".fieldlist-row"); | ||||
|             let field = row.find("select[name$='field']").val(); | ||||
|             let operator = row.find("select[name$='operator']").val(); | ||||
|             let value = row.find("input[name$='value']").val(); | ||||
|              | ||||
|             // Validate that all fields are filled | ||||
|             if (!field || field === "None" || !operator || operator === "None" || !value) { | ||||
|                 alert("Please fill in all fields (Field, Operator, and Value) before verifying."); | ||||
|                 return; | ||||
|             } | ||||
|  | ||||
|              | ||||
|             // Create a rule object | ||||
|             let rule = { | ||||
|                 field: field, | ||||
|                 operator: operator, | ||||
|                 value: value | ||||
|             }; | ||||
|              | ||||
|             // Show a spinner or some indication that verification is in progress | ||||
|             const $button = $(this); | ||||
|             const originalHTML = $button.html(); | ||||
|             $button.html("⌛").prop("disabled", true); | ||||
|              | ||||
|             // Collect form data - similar to request_textpreview_update() in watch-settings.js | ||||
|             let formData = new FormData(); | ||||
|             $('#edit-text-filter textarea, #edit-text-filter input').each(function() { | ||||
|                 const $element = $(this); | ||||
|                 const name = $element.attr('name'); | ||||
|                 if (name) { | ||||
|                     if ($element.is(':checkbox')) { | ||||
|                         formData.append(name, $element.is(':checked') ? $element.val() : false); | ||||
|                     } else { | ||||
|                         formData.append(name, $element.val()); | ||||
|                     } | ||||
|                 } | ||||
|             }); | ||||
|              | ||||
|             // Also collect select values | ||||
|             $('#edit-text-filter select').each(function() { | ||||
|                 const $element = $(this); | ||||
|                 const name = $element.attr('name'); | ||||
|                 if (name) { | ||||
|                     formData.append(name, $element.val()); | ||||
|                 } | ||||
|             }); | ||||
|  | ||||
|  | ||||
|             // Send the request to verify the rule | ||||
|             $.ajax({ | ||||
|                 url: verify_condition_rule_url+"?"+ new URLSearchParams({ rule: JSON.stringify(rule) }).toString(), | ||||
|                 type: "POST", | ||||
|                 data: formData, | ||||
|                 processData: false, // Prevent jQuery from converting FormData to a string | ||||
|                 contentType: false, // Let the browser set the correct content type | ||||
|                 success: function (response) { | ||||
|                     if (response.status === "success") { | ||||
|                         if(rule['field'] !== "page_filtered_text") { | ||||
|                             // A little debug helper for the user | ||||
|                             $('#verify-state-text').text(`${rule['field']} was value "${response.data[rule['field']]}"`) | ||||
|                         } | ||||
|                         if (response.result) { | ||||
|                             alert("✅ Condition PASSES verification against current snapshot!"); | ||||
|                         } else { | ||||
|                             alert("❌ Condition FAILS verification against current snapshot."); | ||||
|                         } | ||||
|                     } else { | ||||
|                         alert("Error: " + response.message); | ||||
|                     } | ||||
|                     $button.html(originalHTML).prop("disabled", false); | ||||
|                 }, | ||||
|                 error: function (xhr) { | ||||
|                     let errorMsg = "Error verifying condition."; | ||||
|                     if (xhr.responseJSON && xhr.responseJSON.message) { | ||||
|                         errorMsg = xhr.responseJSON.message; | ||||
|                     } | ||||
|                     alert(errorMsg); | ||||
|                     $button.html(originalHTML).prop("disabled", false); | ||||
|                 } | ||||
|             }); | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     // Function to reindex form elements and re-setup event handlers | ||||
|     function reindexRules() { | ||||
|         // Unbind all button handlers first | ||||
|         $(".addRuleRow, .removeRuleRow, .verifyRuleRow").off("click"); | ||||
|          | ||||
|         // Reindex all form elements | ||||
|         $("#rulesTable .fieldlist-row").each(function(index) { | ||||
|             $(this).find("select, input").each(function() { | ||||
|                 let oldName = $(this).attr("name"); | ||||
|                 let oldId = $(this).attr("id"); | ||||
|  | ||||
|                 if (oldName) { | ||||
|                     let newName = oldName.replace(/\d+/, index); | ||||
|                     $(this).attr("name", newName); | ||||
|                 } | ||||
|  | ||||
|                 if (oldId) { | ||||
|                     let newId = oldId.replace(/\d+/, index); | ||||
|                     $(this).attr("id", newId); | ||||
|                 } | ||||
|             }); | ||||
|         }); | ||||
|          | ||||
|         // Reattach event handlers after reindexing | ||||
|         setupButtonHandlers(); | ||||
|     } | ||||
|  | ||||
|     // Initial setup of button handlers | ||||
|     setupButtonHandlers(); | ||||
| }); | ||||
| @@ -24,5 +24,19 @@ $(document).ready(function () { | ||||
|         $(target).toggle(); | ||||
|     }); | ||||
|  | ||||
|     // Time zone config related | ||||
|     $(".local-time").each(function (e) { | ||||
|         $(this).text(new Date($(this).data("utc")).toLocaleString()); | ||||
|     }) | ||||
|  | ||||
|     const timezoneInput = $('#application-timezone'); | ||||
|     if(timezoneInput.length) { | ||||
|         const timezone = Intl.DateTimeFormat().resolvedOptions().timeZone; | ||||
|         if (!timezoneInput.val().trim()) { | ||||
|             timezoneInput.val(timezone); | ||||
|             timezoneInput.after('<div class="timezone-message">The timezone was set from your browser, <strong>be sure to press save!</strong></div>'); | ||||
|         } | ||||
|     } | ||||
|  | ||||
| }); | ||||
|  | ||||
|   | ||||
| @@ -1,56 +0,0 @@ | ||||
| /** | ||||
|  * debounce | ||||
|  * @param {integer} milliseconds This param indicates the number of milliseconds | ||||
|  *     to wait after the last call before calling the original function. | ||||
|  * @param {object} What "this" refers to in the returned function. | ||||
|  * @return {function} This returns a function that when called will wait the | ||||
|  *     indicated number of milliseconds after the last call before | ||||
|  *     calling the original function. | ||||
|  */ | ||||
| Function.prototype.debounce = function (milliseconds, context) { | ||||
|     var baseFunction = this, | ||||
|         timer = null, | ||||
|         wait = milliseconds; | ||||
|  | ||||
|     return function () { | ||||
|         var self = context || this, | ||||
|             args = arguments; | ||||
|  | ||||
|         function complete() { | ||||
|             baseFunction.apply(self, args); | ||||
|             timer = null; | ||||
|         } | ||||
|  | ||||
|         if (timer) { | ||||
|             clearTimeout(timer); | ||||
|         } | ||||
|  | ||||
|         timer = setTimeout(complete, wait); | ||||
|     }; | ||||
| }; | ||||
|  | ||||
| /** | ||||
| * throttle | ||||
| * @param {integer} milliseconds This param indicates the number of milliseconds | ||||
| *     to wait between calls before calling the original function. | ||||
| * @param {object} What "this" refers to in the returned function. | ||||
| * @return {function} This returns a function that when called will wait the | ||||
| *     indicated number of milliseconds between calls before | ||||
| *     calling the original function. | ||||
| */ | ||||
| Function.prototype.throttle = function (milliseconds, context) { | ||||
|     var baseFunction = this, | ||||
|         lastEventTimestamp = null, | ||||
|         limit = milliseconds; | ||||
|  | ||||
|     return function () { | ||||
|         var self = context || this, | ||||
|             args = arguments, | ||||
|             now = Date.now(); | ||||
|  | ||||
|         if (!lastEventTimestamp || now - lastEventTimestamp >= limit) { | ||||
|             lastEventTimestamp = now; | ||||
|             baseFunction.apply(self, args); | ||||
|         } | ||||
|     }; | ||||
| }; | ||||
| @@ -1,45 +1,52 @@ | ||||
| $(document).ready(function() { | ||||
| $(document).ready(function () { | ||||
|  | ||||
|   $('#add-email-helper').click(function (e) { | ||||
|     e.preventDefault(); | ||||
|     email = prompt("Destination email"); | ||||
|     if(email) { | ||||
|       var n = $(".notification-urls"); | ||||
|       var p=email_notification_prefix; | ||||
|       $(n).val( $.trim( $(n).val() )+"\n"+email_notification_prefix+email ); | ||||
|     } | ||||
|   }); | ||||
|  | ||||
|   $('#send-test-notification').click(function (e) { | ||||
|     e.preventDefault(); | ||||
|  | ||||
|     data = { | ||||
|       notification_body: $('#notification_body').val(), | ||||
|       notification_format: $('#notification_format').val(), | ||||
|       notification_title: $('#notification_title').val(), | ||||
|       notification_urls: $('.notification-urls').val(), | ||||
|       tags: $('#tags').val(), | ||||
|       window_url: window.location.href, | ||||
|     } | ||||
|  | ||||
|  | ||||
|     $.ajax({ | ||||
|       type: "POST", | ||||
|       url: notification_base_url, | ||||
|       data : data, | ||||
|         statusCode: { | ||||
|         400: function() { | ||||
|             // More than likely the CSRF token was lost when the server restarted | ||||
|           alert("There was a problem processing the request, please reload the page."); | ||||
|     $('#add-email-helper').click(function (e) { | ||||
|         e.preventDefault(); | ||||
|         email = prompt("Destination email"); | ||||
|         if (email) { | ||||
|             var n = $(".notification-urls"); | ||||
|             var p = email_notification_prefix; | ||||
|             $(n).val($.trim($(n).val()) + "\n" + email_notification_prefix + email); | ||||
|         } | ||||
|       } | ||||
|     }).done(function(data){ | ||||
|       console.log(data); | ||||
|       alert(data); | ||||
|     }).fail(function(data){ | ||||
|       console.log(data); | ||||
|       alert('There was an error communicating with the server.'); | ||||
|     }) | ||||
|   }); | ||||
|     }); | ||||
|  | ||||
|     $('#send-test-notification').click(function (e) { | ||||
|         e.preventDefault(); | ||||
|  | ||||
|         data = { | ||||
|             notification_body: $('#notification_body').val(), | ||||
|             notification_format: $('#notification_format').val(), | ||||
|             notification_title: $('#notification_title').val(), | ||||
|             notification_urls: $('.notification-urls').val(), | ||||
|             tags: $('#tags').val(), | ||||
|             window_url: window.location.href, | ||||
|         } | ||||
|  | ||||
|         $('.notifications-wrapper .spinner').fadeIn(); | ||||
|         $('#notification-test-log').show(); | ||||
|         $.ajax({ | ||||
|             type: "POST", | ||||
|             url: notification_base_url, | ||||
|             data: data, | ||||
|             statusCode: { | ||||
|                 400: function (data) { | ||||
|                     $("#notification-test-log>span").text(data.responseText); | ||||
|                 }, | ||||
|             } | ||||
|         }).done(function (data) { | ||||
|             $("#notification-test-log>span").text(data); | ||||
|         }).fail(function (jqXHR, textStatus, errorThrown) { | ||||
|             // Handle connection refused or other errors | ||||
|             if (textStatus === "error" && errorThrown === "") { | ||||
|                 console.error("Connection refused or server unreachable"); | ||||
|                 $("#notification-test-log>span").text("Error: Connection refused or server is unreachable."); | ||||
|             } else { | ||||
|                 console.error("Error:", textStatus, errorThrown); | ||||
|                 $("#notification-test-log>span").text("An error occurred: " + textStatus); | ||||
|             } | ||||
|         }).always(function () { | ||||
|             $('.notifications-wrapper .spinner').hide(); | ||||
|         }) | ||||
|     }); | ||||
| }); | ||||
|  | ||||
|   | ||||
							
								
								
									
										196
									
								
								changedetectionio/static/js/plugins.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,196 @@ | ||||
| (function ($) { | ||||
|     /** | ||||
|      * debounce | ||||
|      * @param {integer} milliseconds This param indicates the number of milliseconds | ||||
|      *     to wait after the last call before calling the original function. | ||||
|      * @param {object} What "this" refers to in the returned function. | ||||
|      * @return {function} This returns a function that when called will wait the | ||||
|      *     indicated number of milliseconds after the last call before | ||||
|      *     calling the original function. | ||||
|      */ | ||||
|     Function.prototype.debounce = function (milliseconds, context) { | ||||
|         var baseFunction = this, | ||||
|             timer = null, | ||||
|             wait = milliseconds; | ||||
|  | ||||
|         return function () { | ||||
|             var self = context || this, | ||||
|                 args = arguments; | ||||
|  | ||||
|             function complete() { | ||||
|                 baseFunction.apply(self, args); | ||||
|                 timer = null; | ||||
|             } | ||||
|  | ||||
|             if (timer) { | ||||
|                 clearTimeout(timer); | ||||
|             } | ||||
|  | ||||
|             timer = setTimeout(complete, wait); | ||||
|         }; | ||||
|     }; | ||||
|  | ||||
|     /** | ||||
|      * throttle | ||||
|      * @param {integer} milliseconds This param indicates the number of milliseconds | ||||
|      *     to wait between calls before calling the original function. | ||||
|      * @param {object} What "this" refers to in the returned function. | ||||
|      * @return {function} This returns a function that when called will wait the | ||||
|      *     indicated number of milliseconds between calls before | ||||
|      *     calling the original function. | ||||
|      */ | ||||
|     Function.prototype.throttle = function (milliseconds, context) { | ||||
|         var baseFunction = this, | ||||
|             lastEventTimestamp = null, | ||||
|             limit = milliseconds; | ||||
|  | ||||
|         return function () { | ||||
|             var self = context || this, | ||||
|                 args = arguments, | ||||
|                 now = Date.now(); | ||||
|  | ||||
|             if (!lastEventTimestamp || now - lastEventTimestamp >= limit) { | ||||
|                 lastEventTimestamp = now; | ||||
|                 baseFunction.apply(self, args); | ||||
|             } | ||||
|         }; | ||||
|     }; | ||||
|  | ||||
|     $.fn.highlightLines = function (configurations) { | ||||
|         return this.each(function () { | ||||
|             const $pre = $(this); | ||||
|             const textContent = $pre.text(); | ||||
|             const lines = textContent.split(/\r?\n/); // Handles both \n and \r\n line endings | ||||
|  | ||||
|             // Build a map of line numbers to styles | ||||
|             const lineStyles = {}; | ||||
|  | ||||
|             configurations.forEach(config => { | ||||
|                 const {color, lines: lineNumbers} = config; | ||||
|                 lineNumbers.forEach(lineNumber => { | ||||
|                     lineStyles[lineNumber] = color; | ||||
|                 }); | ||||
|             }); | ||||
|  | ||||
|             // Function to escape HTML characters | ||||
|             function escapeHtml(text) { | ||||
|                 return text.replace(/[&<>"'`=\/]/g, function (s) { | ||||
|                     return "&#" + s.charCodeAt(0) + ";"; | ||||
|                 }); | ||||
|             } | ||||
|  | ||||
|             // Process each line | ||||
|             const processedLines = lines.map((line, index) => { | ||||
|                 const lineNumber = index + 1; // Line numbers start at 1 | ||||
|                 const escapedLine = escapeHtml(line); | ||||
|                 const color = lineStyles[lineNumber]; | ||||
|  | ||||
|                 if (color) { | ||||
|                     // Wrap the line in a span with inline style | ||||
|                     return `<span style="background-color: ${color}">${escapedLine}</span>`; | ||||
|                 } else { | ||||
|                     return escapedLine; | ||||
|                 } | ||||
|             }); | ||||
|  | ||||
|             // Join the lines back together | ||||
|             const newContent = processedLines.join('\n'); | ||||
|  | ||||
|             // Set the new content as HTML | ||||
|             $pre.html(newContent); | ||||
|         }); | ||||
|     }; | ||||
|     $.fn.miniTabs = function (tabsConfig, options) { | ||||
|         const settings = { | ||||
|             tabClass: 'minitab', | ||||
|             tabsContainerClass: 'minitabs', | ||||
|             activeClass: 'active', | ||||
|             ...(options || {}) | ||||
|         }; | ||||
|  | ||||
|         return this.each(function () { | ||||
|             const $wrapper = $(this); | ||||
|             const $contents = $wrapper.find('div[id]').hide(); | ||||
|             const $tabsContainer = $('<div>', {class: settings.tabsContainerClass}).prependTo($wrapper); | ||||
|  | ||||
|             // Generate tabs | ||||
|             Object.entries(tabsConfig).forEach(([tabTitle, contentSelector], index) => { | ||||
|                 const $content = $wrapper.find(contentSelector); | ||||
|                 if (index === 0) $content.show(); // Show first content by default | ||||
|  | ||||
|                 $('<a>', { | ||||
|                     class: `${settings.tabClass}${index === 0 ? ` ${settings.activeClass}` : ''}`, | ||||
|                     text: tabTitle, | ||||
|                     'data-target': contentSelector | ||||
|                 }).appendTo($tabsContainer); | ||||
|             }); | ||||
|  | ||||
|             // Tab click event | ||||
|             $tabsContainer.on('click', `.${settings.tabClass}`, function (e) { | ||||
|                 e.preventDefault(); | ||||
|                 const $tab = $(this); | ||||
|                 const target = $tab.data('target'); | ||||
|  | ||||
|                 // Update active tab | ||||
|                 $tabsContainer.find(`.${settings.tabClass}`).removeClass(settings.activeClass); | ||||
|                 $tab.addClass(settings.activeClass); | ||||
|  | ||||
|                 // Show/hide content | ||||
|                 $contents.hide(); | ||||
|                 $wrapper.find(target).show(); | ||||
|             }); | ||||
|         }); | ||||
|     }; | ||||
|  | ||||
|     // Object to store ongoing requests by namespace | ||||
|     const requests = {}; | ||||
|  | ||||
|     $.abortiveSingularAjax = function (options) { | ||||
|         const namespace = options.namespace || 'default'; | ||||
|  | ||||
|         // Abort the current request in this namespace if it's still ongoing | ||||
|         if (requests[namespace]) { | ||||
|             requests[namespace].abort(); | ||||
|         } | ||||
|  | ||||
|         // Start a new AJAX request and store its reference in the correct namespace | ||||
|         requests[namespace] = $.ajax(options); | ||||
|  | ||||
|         // Return the current request in case it's needed | ||||
|         return requests[namespace]; | ||||
|     }; | ||||
| })(jQuery); | ||||
|  | ||||
|  | ||||
|  | ||||
| function toggleOpacity(checkboxSelector, fieldSelector, inverted) { | ||||
|     const checkbox = document.querySelector(checkboxSelector); | ||||
|     const fields = document.querySelectorAll(fieldSelector); | ||||
|  | ||||
|     function updateOpacity() { | ||||
|         const opacityValue = !checkbox.checked ? (inverted ? 0.6 : 1) : (inverted ? 1 : 0.6); | ||||
|         fields.forEach(field => { | ||||
|             field.style.opacity = opacityValue; | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     // Initial setup | ||||
|     updateOpacity(); | ||||
|     checkbox.addEventListener('change', updateOpacity); | ||||
| } | ||||
|  | ||||
| function toggleVisibility(checkboxSelector, fieldSelector, inverted) { | ||||
|     const checkbox = document.querySelector(checkboxSelector); | ||||
|     const fields = document.querySelectorAll(fieldSelector); | ||||
|  | ||||
|     function updateOpacity() { | ||||
|         const opacityValue = !checkbox.checked ? (inverted ? 'none' : 'block') : (inverted ? 'block' : 'none'); | ||||
|         fields.forEach(field => { | ||||
|             field.style.display = opacityValue; | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     // Initial setup | ||||
|     updateOpacity(); | ||||
|     checkbox.addEventListener('change', updateOpacity); | ||||
| } | ||||
| @@ -1,53 +1,63 @@ | ||||
| function redirect_to_version(version) { | ||||
|     var currentUrl = window.location.href; | ||||
|     var baseUrl = currentUrl.split('?')[0]; // Base URL without query parameters | ||||
| function redirectToVersion(version) { | ||||
|     var currentUrl = window.location.href.split('?')[0]; // Base URL without query parameters | ||||
|     var anchor = ''; | ||||
|  | ||||
|     // Check if there is an anchor | ||||
|     if (baseUrl.indexOf('#') !== -1) { | ||||
|         anchor = baseUrl.substring(baseUrl.indexOf('#')); | ||||
|         baseUrl = baseUrl.substring(0, baseUrl.indexOf('#')); | ||||
|     if (currentUrl.indexOf('#') !== -1) { | ||||
|         anchor = currentUrl.substring(currentUrl.indexOf('#')); | ||||
|         currentUrl = currentUrl.substring(0, currentUrl.indexOf('#')); | ||||
|     } | ||||
|     window.location.href = baseUrl + '?version=' + version + anchor; | ||||
|  | ||||
|     window.location.href = currentUrl + '?version=' + version + anchor; | ||||
| } | ||||
|  | ||||
| document.addEventListener('keydown', function (event) { | ||||
|     var selectElement = document.getElementById('preview-version'); | ||||
|     if (selectElement) { | ||||
|         var selectedOption = selectElement.querySelector('option:checked'); | ||||
|         if (selectedOption) { | ||||
|             if (event.key === 'ArrowLeft') { | ||||
|                 if (selectedOption.previousElementSibling) { | ||||
|                     redirect_to_version(selectedOption.previousElementSibling.value); | ||||
|                 } | ||||
|             } else if (event.key === 'ArrowRight') { | ||||
|                 if (selectedOption.nextElementSibling) { | ||||
|                     redirect_to_version(selectedOption.nextElementSibling.value); | ||||
|                 } | ||||
| function setupDateWidget() { | ||||
|     $(document).on('keydown', function (event) { | ||||
|         var $selectElement = $('#preview-version'); | ||||
|         var $selectedOption = $selectElement.find('option:selected'); | ||||
|  | ||||
|         if ($selectedOption.length) { | ||||
|             if (event.key === 'ArrowLeft' && $selectedOption.prev().length) { | ||||
|                 redirectToVersion($selectedOption.prev().val()); | ||||
|             } else if (event.key === 'ArrowRight' && $selectedOption.next().length) { | ||||
|                 redirectToVersion($selectedOption.next().val()); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
| }); | ||||
|     }); | ||||
|  | ||||
|     $('#preview-version').on('change', function () { | ||||
|         redirectToVersion($(this).val()); | ||||
|     }); | ||||
|  | ||||
| document.getElementById('preview-version').addEventListener('change', function () { | ||||
|     redirect_to_version(this.value); | ||||
| }); | ||||
|     var $selectedOption = $('#preview-version option:selected'); | ||||
|  | ||||
| var selectElement = document.getElementById('preview-version'); | ||||
| if (selectElement) { | ||||
|     var selectedOption = selectElement.querySelector('option:checked'); | ||||
|     if (selectedOption) { | ||||
|         if (selectedOption.previousElementSibling) { | ||||
|             document.getElementById('btn-previous').href = "?version=" + selectedOption.previousElementSibling.value; | ||||
|     if ($selectedOption.length) { | ||||
|         var $prevOption = $selectedOption.prev(); | ||||
|         var $nextOption = $selectedOption.next(); | ||||
|  | ||||
|         if ($prevOption.length) { | ||||
|             $('#btn-previous').attr('href', '?version=' + $prevOption.val()); | ||||
|         } else { | ||||
|             document.getElementById('btn-previous').remove() | ||||
|         } | ||||
|         if (selectedOption.nextElementSibling) { | ||||
|             document.getElementById('btn-next').href = "?version=" + selectedOption.nextElementSibling.value; | ||||
|         } else { | ||||
|             document.getElementById('btn-next').remove() | ||||
|             $('#btn-previous').remove(); | ||||
|         } | ||||
|  | ||||
|         if ($nextOption.length) { | ||||
|             $('#btn-next').attr('href', '?version=' + $nextOption.val()); | ||||
|         } else { | ||||
|             $('#btn-next').remove(); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| $(document).ready(function () { | ||||
|     if ($('#preview-version').length) { | ||||
|         setupDateWidget(); | ||||
|     } | ||||
|  | ||||
|     $('#diff-col > pre').highlightLines([ | ||||
|         { | ||||
|             'color': '#ee0000', | ||||
|             'lines': triggered_line_numbers | ||||
|         } | ||||
|     ]); | ||||
| }); | ||||
|   | ||||
| @@ -1,14 +1,14 @@ | ||||
| $(function () { | ||||
|     /* add container before each proxy location to show status */ | ||||
|  | ||||
|     var option_li = $('.fetch-backend-proxy li').filter(function() { | ||||
|         return $("input",this)[0].value.length >0; | ||||
|     }); | ||||
|  | ||||
|     //var option_li = $('.fetch-backend-proxy li'); | ||||
|     var isActive = false; | ||||
|     $(option_li).prepend('<div class="proxy-status"></div>'); | ||||
|     $(option_li).append('<div class="proxy-timing"></div><div class="proxy-check-details"></div>'); | ||||
|  | ||||
|     function setup_html_widget() { | ||||
|         var option_li = $('.fetch-backend-proxy li').filter(function () { | ||||
|             return $("input", this)[0].value.length > 0; | ||||
|         }); | ||||
|         $(option_li).prepend('<div class="proxy-status"></div>'); | ||||
|         $(option_li).append('<div class="proxy-timing"></div><div class="proxy-check-details"></div>'); | ||||
|     } | ||||
|  | ||||
|     function set_proxy_check_status(proxy_key, state) { | ||||
|         // select input by value name | ||||
| @@ -59,8 +59,14 @@ $(function () { | ||||
|     } | ||||
|  | ||||
|     $('#check-all-proxies').click(function (e) { | ||||
|  | ||||
|         e.preventDefault() | ||||
|         $('body').addClass('proxy-check-active'); | ||||
|  | ||||
|         if (!$('body').hasClass('proxy-check-active')) { | ||||
|             setup_html_widget(); | ||||
|             $('body').addClass('proxy-check-active'); | ||||
|         } | ||||
|  | ||||
|         $('.proxy-check-details').html(''); | ||||
|         $('.proxy-status').html('<span class="spinner"></span>').fadeIn(); | ||||
|         $('.proxy-timing').html(''); | ||||
|   | ||||
							
								
								
									
										109
									
								
								changedetectionio/static/js/scheduler.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,109 @@ | ||||
| function getTimeInTimezone(timezone) { | ||||
|     const now = new Date(); | ||||
|     const options = { | ||||
|         timeZone: timezone, | ||||
|         weekday: 'long', | ||||
|         year: 'numeric', | ||||
|         hour12: false, | ||||
|         month: '2-digit', | ||||
|         day: '2-digit', | ||||
|         hour: '2-digit', | ||||
|         minute: '2-digit', | ||||
|         second: '2-digit', | ||||
|     }; | ||||
|  | ||||
|     const formatter = new Intl.DateTimeFormat('en-US', options); | ||||
|     return formatter.format(now); | ||||
| } | ||||
|  | ||||
| $(document).ready(function () { | ||||
|  | ||||
|     let exceedsLimit = false; | ||||
|     const warning_text = $("#timespan-warning") | ||||
|     const timezone_text_widget = $("input[id*='time_schedule_limit-timezone']") | ||||
|  | ||||
|     toggleVisibility('#time_schedule_limit-enabled, #requests-time_schedule_limit-enabled', '#schedule-day-limits-wrapper', true) | ||||
|  | ||||
|     setInterval(() => { | ||||
|         let success = true; | ||||
|         try { | ||||
|             // Show the current local time according to either placeholder or entered TZ name | ||||
|             if (timezone_text_widget.val().length) { | ||||
|                 $('#local-time-in-tz').text(getTimeInTimezone(timezone_text_widget.val())); | ||||
|             } else { | ||||
|                 // So maybe use what is in the placeholder (which will be the default settings) | ||||
|                 $('#local-time-in-tz').text(getTimeInTimezone(timezone_text_widget.attr('placeholder'))); | ||||
|             } | ||||
|         } catch (error) { | ||||
|             success = false; | ||||
|             $('#local-time-in-tz').text(""); | ||||
|             console.error(timezone_text_widget.val()) | ||||
|         } | ||||
|  | ||||
|         $(timezone_text_widget).toggleClass('error', !success); | ||||
|  | ||||
|     }, 500); | ||||
|  | ||||
|     $('#schedule-day-limits-wrapper').on('change click blur', 'input, checkbox, select', function() { | ||||
|  | ||||
|         let allOk = true; | ||||
|  | ||||
|         // Controls setting the warning that the time could overlap into the next day | ||||
|         $("li.day-schedule").each(function () { | ||||
|             const $schedule = $(this); | ||||
|             const $checkbox = $schedule.find("input[type='checkbox']"); | ||||
|  | ||||
|             if ($checkbox.is(":checked")) { | ||||
|                 const timeValue = $schedule.find("input[type='time']").val(); | ||||
|                 const durationHours = parseInt($schedule.find("select[name*='-duration-hours']").val(), 10) || 0; | ||||
|                 const durationMinutes = parseInt($schedule.find("select[name*='-duration-minutes']").val(), 10) || 0; | ||||
|  | ||||
|                 if (timeValue) { | ||||
|                     const [startHours, startMinutes] = timeValue.split(":").map(Number); | ||||
|                     const totalMinutes = (startHours * 60 + startMinutes) + (durationHours * 60 + durationMinutes); | ||||
|  | ||||
|                     exceedsLimit = totalMinutes > 1440 | ||||
|                     if (exceedsLimit) { | ||||
|                         allOk = false | ||||
|                     } | ||||
|                     // Set the row/day-of-week highlight | ||||
|                     $schedule.toggleClass("warning", exceedsLimit); | ||||
|                 } | ||||
|             } else { | ||||
|                 $schedule.toggleClass("warning", false); | ||||
|             } | ||||
|         }); | ||||
|  | ||||
|         warning_text.toggle(!allOk) | ||||
|     }); | ||||
|  | ||||
|     $('table[id*="time_schedule_limit-saturday"], table[id*="time_schedule_limit-sunday"]').addClass("weekend-day") | ||||
|  | ||||
|     // Presets [weekend] [business hours] etc | ||||
|     $(document).on('click', '[data-template].set-schedule', function () { | ||||
|         // Get the value of the 'data-template' attribute | ||||
|         switch ($(this).attr('data-template')) { | ||||
|             case 'business-hours': | ||||
|                 $('.day-schedule table:not(.weekend-day) input[type="time"]').val('09:00') | ||||
|                 $('.day-schedule table:not(.weekend-day) select[id*="-duration-hours"]').val('8'); | ||||
|                 $('.day-schedule table:not(.weekend-day) select[id*="-duration-minutes"]').val('0'); | ||||
|                 $('.day-schedule input[id*="-enabled"]').prop('checked', true); | ||||
|                 $('.day-schedule .weekend-day input[id*="-enabled"]').prop('checked', false); | ||||
|                 break; | ||||
|             case 'weekend': | ||||
|                 $('.day-schedule .weekend-day input[type="time"][id$="start-time"]').val('00:00') | ||||
|                 $('.day-schedule .weekend-day select[id*="-duration-hours"]').val('24'); | ||||
|                 $('.day-schedule .weekend-day select[id*="-duration-minutes"]').val('0'); | ||||
|                 $('.day-schedule input[id*="-enabled"]').prop('checked', false); | ||||
|                 $('.day-schedule .weekend-day input[id*="-enabled"]').prop('checked', true); | ||||
|                 break; | ||||
|             case 'reset': | ||||
|  | ||||
|                 $('.day-schedule input[type="time"]').val('00:00') | ||||
|                 $('.day-schedule select[id*="-duration-hours"]').val('24'); | ||||
|                 $('.day-schedule select[id*="-duration-minutes"]').val('0'); | ||||
|                 $('.day-schedule input[id*="-enabled"]').prop('checked', true); | ||||
|                 break; | ||||
|         } | ||||
|     }); | ||||
| }); | ||||
| @@ -26,8 +26,6 @@ function set_active_tab() { | ||||
|     if (tab.length) { | ||||
|         tab[0].parentElement.className = "active"; | ||||
|     } | ||||
|     // hash could move the page down | ||||
|     window.scrollTo(0, 0); | ||||
| } | ||||
|  | ||||
| function focus_error_tab() { | ||||
|   | ||||
| @@ -49,4 +49,9 @@ $(document).ready(function () { | ||||
|         $("#overlay").toggleClass('visible'); | ||||
|         heartpath.style.fill = document.getElementById("overlay").classList.contains("visible") ? '#ff0000' : 'var(--color-background)'; | ||||
|     }); | ||||
|  | ||||
|     setInterval(function () { | ||||
|         $('body').toggleClass('spinner-active', $.active > 0); | ||||
|     }, 2000); | ||||
|  | ||||
| }); | ||||
|   | ||||
| @@ -132,6 +132,7 @@ $(document).ready(() => { | ||||
|         }).done((data) => { | ||||
|             $fetchingUpdateNoticeElem.html("Rendering.."); | ||||
|             selectorData = data; | ||||
|  | ||||
|             sortScrapedElementsBySize(); | ||||
|             console.log(`Reported browser width from backend: ${data['browser_width']}`); | ||||
|  | ||||
|   | ||||
| @@ -48,6 +48,8 @@ $(function () { | ||||
|         $('input[type=checkbox]').not(this).prop('checked', this.checked); | ||||
|     }); | ||||
|  | ||||
|     const time_check_step_size_seconds=1; | ||||
|  | ||||
|     // checkboxes - show/hide buttons | ||||
|     $("input[type=checkbox]").click(function (e) { | ||||
|         if ($('input[type=checkbox]:checked').length) { | ||||
| @@ -57,5 +59,30 @@ $(function () { | ||||
|         } | ||||
|     }); | ||||
|  | ||||
|     setInterval(function () { | ||||
|         // Background ETA completion for 'checking now' | ||||
|         $(".watch-table .checking-now .last-checked").each(function () { | ||||
|             const eta_complete = parseFloat($(this).data('eta_complete')); | ||||
|             const fetch_duration = parseInt($(this).data('fetchduration')); | ||||
|  | ||||
|             if (eta_complete + 2 > nowtimeserver && fetch_duration > 3) { | ||||
|                 const remaining_seconds = Math.abs(eta_complete) - nowtimeserver - 1; | ||||
|  | ||||
|                 let r = (1.0 - (remaining_seconds / fetch_duration)) * 100; | ||||
|                 if (r < 10) { | ||||
|                     r = 10; | ||||
|                 } | ||||
|                 if (r >= 90) { | ||||
|                     r = 100; | ||||
|                 } | ||||
|                 $(this).css('background-size', `${r}% 100%`); | ||||
|                 //$(this).text(`${r}% remain ${remaining_seconds}`); | ||||
|             } else { | ||||
|                 $(this).css('background-size', `100% 100%`); | ||||
|             } | ||||
|         }); | ||||
|  | ||||
|         nowtimeserver = nowtimeserver + time_check_step_size_seconds; | ||||
|     }, time_check_step_size_seconds * 1000); | ||||
| }); | ||||
|  | ||||
|   | ||||
| @@ -1,18 +1,51 @@ | ||||
| function toggleOpacity(checkboxSelector, fieldSelector, inverted) { | ||||
|     const checkbox = document.querySelector(checkboxSelector); | ||||
|     const fields = document.querySelectorAll(fieldSelector); | ||||
|     function updateOpacity() { | ||||
|         const opacityValue = !checkbox.checked ? (inverted ? 0.6 : 1) : (inverted ? 1 : 0.6); | ||||
|         fields.forEach(field => { | ||||
|             field.style.opacity = opacityValue; | ||||
|         }); | ||||
|  | ||||
| function request_textpreview_update() { | ||||
|     if (!$('body').hasClass('preview-text-enabled')) { | ||||
|         console.error("Preview text was requested but body tag was not setup") | ||||
|         return | ||||
|     } | ||||
|     // Initial setup | ||||
|     updateOpacity(); | ||||
|     checkbox.addEventListener('change', updateOpacity); | ||||
|  | ||||
|     const data = {}; | ||||
|     $('textarea:visible, input:visible').each(function () { | ||||
|         const $element = $(this); // Cache the jQuery object for the current element | ||||
|         const name = $element.attr('name'); // Get the name attribute of the element | ||||
|         data[name] = $element.is(':checkbox') ? ($element.is(':checked') ? $element.val() : false) : $element.val(); | ||||
|     }); | ||||
|  | ||||
|     $('body').toggleClass('spinner-active', 1); | ||||
|  | ||||
|     $.abortiveSingularAjax({ | ||||
|         type: "POST", | ||||
|         url: preview_text_edit_filters_url, | ||||
|         data: data, | ||||
|         namespace: 'watchEdit' | ||||
|     }).done(function (data) { | ||||
|         console.debug(data['duration']) | ||||
|         $('#filters-and-triggers #text-preview-before-inner').text(data['before_filter']); | ||||
|         $('#filters-and-triggers #text-preview-inner') | ||||
|             .text(data['after_filter']) | ||||
|             .highlightLines([ | ||||
|                 { | ||||
|                     'color': '#ee0000', | ||||
|                     'lines': data['trigger_line_numbers'] | ||||
|                 }, | ||||
|                 { | ||||
|                     'color': '#757575', | ||||
|                     'lines': data['ignore_line_numbers'] | ||||
|                 } | ||||
|             ]) | ||||
|     }).fail(function (error) { | ||||
|         if (error.statusText === 'abort') { | ||||
|             console.log('Request was aborted due to a new request being fired.'); | ||||
|         } else { | ||||
|             $('#filters-and-triggers #text-preview-inner').text('There was an error communicating with the server.'); | ||||
|         } | ||||
|     }) | ||||
| } | ||||
|  | ||||
|  | ||||
| $(document).ready(function () { | ||||
|  | ||||
|     $('#notification-setting-reset-to-default').click(function (e) { | ||||
|         $('#notification_title').val(''); | ||||
|         $('#notification_body').val(''); | ||||
| @@ -25,7 +58,24 @@ $(document).ready(function () { | ||||
|         $('#notification-tokens-info').toggle(); | ||||
|     }); | ||||
|  | ||||
|     toggleOpacity('#time_between_check_use_default', '#time_between_check', false); | ||||
|     toggleOpacity('#time_between_check_use_default', '#time-check-widget-wrapper, #time-between-check-schedule', false); | ||||
|  | ||||
|  | ||||
|     const vh = Math.max(document.documentElement.clientHeight || 0, window.innerHeight || 0); | ||||
|     $("#text-preview-inner").css('max-height', (vh - 300) + "px"); | ||||
|     $("#text-preview-before-inner").css('max-height', (vh - 300) + "px"); | ||||
|  | ||||
|     $("#activate-text-preview").click(function (e) { | ||||
|         $('body').toggleClass('preview-text-enabled') | ||||
|         request_textpreview_update(); | ||||
|         const method = $('body').hasClass('preview-text-enabled') ? 'on' : 'off'; | ||||
|         $('#filters-and-triggers textarea')[method]('blur', request_textpreview_update.throttle(1000)); | ||||
|         $('#filters-and-triggers input')[method]('change', request_textpreview_update.throttle(1000)); | ||||
|         $("#filters-and-triggers-tab")[method]('click', request_textpreview_update.throttle(1000)); | ||||
|     }); | ||||
|     $('.minitabs-wrapper').miniTabs({ | ||||
|         "Content after filters": "#text-preview-inner", | ||||
|         "Content raw/before filters": "#text-preview-before-inner" | ||||
|     }); | ||||
| }); | ||||
|  | ||||
|   | ||||
| @@ -153,7 +153,8 @@ html[data-darkmode="true"] { | ||||
|     border: 1px solid transparent; | ||||
|     vertical-align: top; | ||||
|     font: 1em monospace; | ||||
|     text-align: left; } | ||||
|     text-align: left; | ||||
|     overflow: clip; } | ||||
|   #diff-ui pre { | ||||
|     white-space: pre-wrap; } | ||||
|  | ||||
| @@ -172,7 +173,9 @@ ins { | ||||
|   text-decoration: none; } | ||||
|  | ||||
| #result { | ||||
|   white-space: pre-wrap; } | ||||
|   white-space: pre-wrap; | ||||
|   word-break: break-word; | ||||
|   overflow-wrap: break-word; } | ||||
|  | ||||
| #settings { | ||||
|   background: rgba(0, 0, 0, 0.05); | ||||
| @@ -231,3 +234,12 @@ td#diff-col div { | ||||
|   border-radius: 5px; | ||||
|   background: var(--color-background); | ||||
|   box-shadow: 1px 1px 4px var(--color-shadow-jump); } | ||||
|  | ||||
| .pure-form button.reset-margin { | ||||
|   margin: 0px; } | ||||
|  | ||||
| .diff-fieldset { | ||||
|   display: flex; | ||||
|   align-items: center; | ||||
|   gap: 4px; | ||||
|   flex-wrap: wrap; } | ||||
|   | ||||
| @@ -24,6 +24,7 @@ | ||||
|     vertical-align: top; | ||||
|     font: 1em monospace; | ||||
|     text-align: left; | ||||
|     overflow: clip; // clip overflowing contents to cell boundariess | ||||
|   } | ||||
|  | ||||
|   pre { | ||||
| @@ -50,6 +51,8 @@ ins { | ||||
|  | ||||
| #result { | ||||
|   white-space: pre-wrap; | ||||
|   word-break: break-word; | ||||
|   overflow-wrap: break-word; | ||||
|  | ||||
|   .change { | ||||
|     span {} | ||||
| @@ -134,3 +137,15 @@ td#diff-col div { | ||||
|   background: var(--color-background); | ||||
|   box-shadow: 1px 1px 4px var(--color-shadow-jump); | ||||
| } | ||||
|  | ||||
| // resets button margin to 0px | ||||
| .pure-form button.reset-margin { | ||||
|   margin: 0px; | ||||
| } | ||||
|  | ||||
| .diff-fieldset { | ||||
|   display: flex; | ||||
|   align-items: center; | ||||
|   gap: 4px; | ||||
|   flex-wrap: wrap; | ||||
| } | ||||
| @@ -40,19 +40,22 @@ | ||||
|   } | ||||
| } | ||||
|  | ||||
| @media only screen and (min-width: 760px) { | ||||
|  | ||||
| #browser-steps .flex-wrapper { | ||||
|   display: flex; | ||||
|   flex-flow: row; | ||||
|   height: 70vh; | ||||
|   font-size: 80%; | ||||
|   #browser-steps-ui { | ||||
|     flex-grow: 1;      /* Allow it to grow and fill the available space */ | ||||
|     flex-shrink: 1;    /* Allow it to shrink if needed */ | ||||
|     flex-basis: 0;     /* Start with 0 base width so it stretches as much as possible */ | ||||
|     background-color: #eee; | ||||
|     border-radius: 5px; | ||||
|   #browser-steps .flex-wrapper { | ||||
|     display: flex; | ||||
|     flex-flow: row; | ||||
|     height: 70vh; | ||||
|     font-size: 80%; | ||||
|  | ||||
|     #browser-steps-ui { | ||||
|       flex-grow: 1; /* Allow it to grow and fill the available space */ | ||||
|       flex-shrink: 1; /* Allow it to shrink if needed */ | ||||
|       flex-basis: 0; /* Start with 0 base width so it stretches as much as possible */ | ||||
|       background-color: #eee; | ||||
|       border-radius: 5px; | ||||
|  | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   #browser-steps-fieldlist { | ||||
| @@ -63,15 +66,21 @@ | ||||
|     padding-left: 1rem; | ||||
|     overflow-y: scroll; | ||||
|   } | ||||
|  | ||||
|   /*  this is duplicate :( */ | ||||
|   #browsersteps-selector-wrapper { | ||||
|     height: 100% !important; | ||||
|   } | ||||
| } | ||||
|  | ||||
| /*  this is duplicate :( */ | ||||
| #browsersteps-selector-wrapper { | ||||
|   height: 100%; | ||||
|  | ||||
|   width: 100%; | ||||
|   overflow-y: scroll; | ||||
|   position: relative; | ||||
|   //width: 100%; | ||||
|   height: 80vh; | ||||
|  | ||||
|   > img { | ||||
|     position: absolute; | ||||
|     max-width: 100%; | ||||
| @@ -91,7 +100,6 @@ | ||||
|     left: 50%; | ||||
|     top: 50%; | ||||
|     transform: translate(-50%, -50%); | ||||
|     margin-left: -40px; | ||||
|     z-index: 100; | ||||
|     max-width: 350px; | ||||
|     text-align: center; | ||||
|   | ||||
| @@ -0,0 +1,135 @@ | ||||
/* Styles for the flexbox-based table replacement for conditions */
.fieldlist_formfields {
  /* Single source of truth for the fixed "actions" column width
     (was hard-coded in two places). */
  $actions-column-width: 120px;

  width: 100%;
  background-color: var(--color-background, #fff);
  border-radius: 4px;
  border: 1px solid var(--color-border-table-cell, #cbcbcb);

  /* Header row */
  .fieldlist-header {
    display: flex;
    background-color: var(--color-background-table-thead, #e0e0e0);
    font-weight: bold;
    border-bottom: 1px solid var(--color-border-table-cell, #cbcbcb);
  }

  .fieldlist-header-cell {
    flex: 1;
    padding: 0.5em 1em;
    text-align: left;

    &:last-child {
      flex: 0 0 $actions-column-width; /* Fixed width for actions column */
    }
  }

  /* Body rows */
  .fieldlist-body {
    display: flex;
    flex-direction: column;
  }

  .fieldlist-row {
    display: flex;
    border-bottom: 1px solid var(--color-border-table-cell, #cbcbcb);

    &:last-child {
      border-bottom: none;
    }

    /* Zebra-stripe odd rows (odd is equivalent to 2n-1, but clearer). */
    &:nth-child(odd) {
      background-color: var(--color-table-stripe, #f2f2f2);
    }

    &.error-row {
      background-color: var(--color-error-input, #ffdddd);
    }
  }

  .fieldlist-cell {
    flex: 1;
    padding: 0.5em 1em;
    display: flex;
    flex-direction: column;
    justify-content: center;

    /* Make inputs take up full width of their cell */
    input, select {
      width: 100%;
    }

    &.fieldlist-actions {
      flex: 0 0 $actions-column-width; /* Fixed width for actions column */
      display: flex;
      flex-direction: row;
      align-items: center;
      gap: 4px;
    }
  }

  /* Error styling */
  ul.errors {
    margin-top: 0.5em;
    margin-bottom: 0;
    padding: 0.5em;
    background-color: var(--color-error-background-snapshot-age, #ffdddd);
    border-radius: 4px;
    list-style-position: inside;
  }

  /* Responsive styles: collapse each row into a stacked card on narrow
     screens and label cells via their data-label attribute. */
  @media only screen and (max-width: 760px) {
    .fieldlist-header, .fieldlist-row {
      flex-direction: column;
    }

    .fieldlist-header-cell {
      display: none;
    }

    .fieldlist-row {
      padding: 0.5em 0;
      border-bottom: 2px solid var(--color-border-table-cell, #cbcbcb);
    }

    .fieldlist-cell {
      padding: 0.25em 0.5em;

      &.fieldlist-actions {
        flex: 1;
        justify-content: flex-start;
        padding-top: 0.5em;
      }
    }

    /* Add some spacing between fields on mobile */
    .fieldlist-cell:not(:last-child) {
      margin-bottom: 0.5em;
    }

    /* Label each cell on mobile view */
    .fieldlist-cell::before {
      content: attr(data-label);
      font-weight: bold;
      margin-bottom: 0.25em;
    }
  }
}
|  | ||||
/* Appearance of the per-row action buttons (add / remove / verify). */
.fieldlist_formfields {
  .addRuleRow,
  .removeRuleRow,
  .verifyRuleRow {
    background-color: #aaa;
    color: var(--color-foreground-text, #fff);
    font-weight: bold;
    border: none;
    border-radius: 3px;
    padding: 4px 8px;
    cursor: pointer;

    /* Slightly darker grey on hover as click feedback. */
    &:hover {
      background-color: #999;
    }
  }
}