mirror of
				https://github.com/dgtlmoon/changedetection.io.git
				synced 2025-10-30 22:27:52 +00:00 
			
		
		
		
	Compare commits
	
		
			101 Commits
		
	
	
		
			minor-stoc
			...
			UI-tabs-fi
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | 74799cd840 | ||
|   | 467f055b67 | ||
|   | 82211eef82 | ||
|   | 5d9380609c | ||
|   | a8b3918fca | ||
|   | e83fb37fb6 | ||
|   | 6b99afe0f7 | ||
|   | 09ebc6ec63 | ||
|   | 6b1065502e | ||
|   | d4c470984a | ||
|   | 55da48f719 | ||
|   | dbd4adf23a | ||
|   | b1e700b3ff | ||
|   | 1c61b5a623 | ||
|   | e799a1cdcb | ||
|   | 938065db6f | ||
|   | 4f2d38ff49 | ||
|   | 8960f401b7 | ||
|   | 1c1f1c6f6b | ||
|   | a2a98811a5 | ||
|   | 5a0ef8fc01 | ||
|   | d90de0851d | ||
|   | 360b4f0d8b | ||
|   | 6fc04d7f1c | ||
|   | 66fb05527b | ||
|   | 202e47d728 | ||
|   | d67d396b88 | ||
|   | 05f54f0ce6 | ||
|   | 6adf10597e | ||
|   | 4419bc0e61 | ||
|   | f7e9846c9b | ||
|   | 5dea5e1def | ||
|   | 0fade0a473 | ||
|   | 121e9c20e0 | ||
|   | 12cec2d541 | ||
|   | d52e6e8e11 | ||
|   | bae1a89b75 | ||
|   | e49711f449 | ||
|   | a3a3ab0622 | ||
|   | c5fe188b28 | ||
|   | 1fb0adde54 | ||
|   | 2614b275f0 | ||
|   | 1631a55830 | ||
|   | f00b8e4efb | ||
|   | 179ca171d4 | ||
|   | 84f2870d4f | ||
|   | 7421e0f95e | ||
|   | c6162e48f1 | ||
|   | feccb18cdc | ||
|   | 1462ad89ac | ||
|   | cfb9fadec8 | ||
|   | d9f9fa735d | ||
|   | 6084b0f23d | ||
|   | 4e18aea5ff | ||
|   | fdba6b5566 | ||
|   | 4e6c783c45 | ||
|   | 0f0f5af7b5 | ||
|   | 7fcba26bea | ||
|   | 4bda1a234f | ||
|   | d297850539 | ||
|   | 751239250f | ||
|   | 6aceeb01ab | ||
|   | 49bc982c69 | ||
|   | e0abf0b505 | ||
|   | f08a1185aa | ||
|   | ad5d7efbbf | ||
|   | 7029d10f8b | ||
|   | 26d3a23e05 | ||
|   | 942625e1fb | ||
|   | 33c83230a6 | ||
|   | 87510becb5 | ||
|   | 5e95dc62a5 | ||
|   | 7d94535dbf | ||
|   | 563c196396 | ||
|   | e8b82c47ca | ||
|   | e84de7e8f4 | ||
|   | 1543edca24 | ||
|   | 82e0b99b07 | ||
|   | b0ff9d161e | ||
|   | c1dd681643 | ||
|   | ecafa27833 | ||
|   | f7d4e58613 | ||
|   | 5bb47e47db | ||
|   | 03151da68e | ||
|   | a16a70229d | ||
|   | 9476c1076b | ||
|   | a4959b5971 | ||
|   | a278fa22f2 | ||
|   | d39530b261 | ||
|   | d4b4355ff5 | ||
|   | c1c8de3104 | ||
|   | 5a768d7db3 | ||
|   | f38429ec93 | ||
|   | 783926962d | ||
|   | 6cd1d50a4f | ||
|   | 54a4970a4c | ||
|   | fd00453e6d | ||
|   | 2842ffb205 | ||
|   | ec4e2f5649 | ||
|   | fe8e3d1cb1 | ||
|   | 69fbafbdb7 | 
| @@ -1,18 +1,31 @@ | ||||
| .git | ||||
| .github | ||||
| changedetectionio/processors/__pycache__ | ||||
| changedetectionio/api/__pycache__ | ||||
| changedetectionio/model/__pycache__ | ||||
| changedetectionio/blueprint/price_data_follower/__pycache__ | ||||
| changedetectionio/blueprint/tags/__pycache__ | ||||
| changedetectionio/blueprint/__pycache__ | ||||
| changedetectionio/blueprint/browser_steps/__pycache__ | ||||
| changedetectionio/fetchers/__pycache__ | ||||
| changedetectionio/tests/visualselector/__pycache__ | ||||
| changedetectionio/tests/restock/__pycache__ | ||||
| changedetectionio/tests/__pycache__ | ||||
| changedetectionio/tests/fetchers/__pycache__ | ||||
| changedetectionio/tests/unit/__pycache__ | ||||
| changedetectionio/tests/proxy_list/__pycache__ | ||||
| changedetectionio/__pycache__ | ||||
| # Git | ||||
| .git/ | ||||
| .gitignore | ||||
|  | ||||
| # GitHub | ||||
| .github/ | ||||
|  | ||||
| # Byte-compiled / optimized / DLL files | ||||
| **/__pycache__ | ||||
| **/*.py[cod] | ||||
|  | ||||
| # Caches | ||||
| .mypy_cache/ | ||||
| .pytest_cache/ | ||||
| .ruff_cache/ | ||||
|  | ||||
| # Distribution / packaging | ||||
| build/ | ||||
| dist/ | ||||
| *.egg-info* | ||||
|  | ||||
| # Virtual environment | ||||
| .env | ||||
| .venv/ | ||||
| venv/ | ||||
|  | ||||
| # IntelliJ IDEA | ||||
| .idea/ | ||||
|  | ||||
| # Visual Studio | ||||
| .vscode/ | ||||
|   | ||||
							
								
								
									
										4
									
								
								.github/ISSUE_TEMPLATE/bug_report.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/ISSUE_TEMPLATE/bug_report.md
									
									
									
									
										vendored
									
									
								
							| @@ -27,6 +27,10 @@ A clear and concise description of what the bug is. | ||||
| **Version** | ||||
| *Exact version* in the top right area: 0.... | ||||
|  | ||||
| **How did you install?** | ||||
|  | ||||
| Docker, Pip, from source directly etc | ||||
|  | ||||
| **To Reproduce** | ||||
|  | ||||
| Steps to reproduce the behavior: | ||||
|   | ||||
							
								
								
									
										23
									
								
								.github/test/Dockerfile-alpine
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										23
									
								
								.github/test/Dockerfile-alpine
									
									
									
									
										vendored
									
									
								
							| @@ -2,32 +2,33 @@ | ||||
| # Test that we can still build on Alpine (musl modified libc https://musl.libc.org/) | ||||
| # Some packages wont install via pypi because they dont have a wheel available under this architecture. | ||||
|  | ||||
| FROM ghcr.io/linuxserver/baseimage-alpine:3.18 | ||||
| FROM ghcr.io/linuxserver/baseimage-alpine:3.21 | ||||
| ENV PYTHONUNBUFFERED=1 | ||||
|  | ||||
| COPY requirements.txt /requirements.txt | ||||
|  | ||||
| RUN \ | ||||
|   apk add --update --no-cache --virtual=build-dependencies \ | ||||
|  apk add --update --no-cache --virtual=build-dependencies \ | ||||
|     build-base \ | ||||
|     cargo \ | ||||
|     g++ \ | ||||
|     gcc \ | ||||
|     git \ | ||||
|     jpeg-dev \ | ||||
|     libc-dev \ | ||||
|     libffi-dev \ | ||||
|     libjpeg \ | ||||
|     libxslt-dev \ | ||||
|     make \ | ||||
|     openssl-dev \ | ||||
|     py3-wheel \ | ||||
|     python3-dev \ | ||||
|     zip \ | ||||
|     zlib-dev && \ | ||||
|   apk add --update --no-cache \ | ||||
|     libjpeg \ | ||||
|     libxslt \ | ||||
|     python3 \ | ||||
|     py3-pip && \ | ||||
|     nodejs \ | ||||
|     poppler-utils \ | ||||
|     python3 && \ | ||||
|   echo "**** pip3 install test of changedetection.io ****" && \ | ||||
|   pip3 install -U pip wheel setuptools && \ | ||||
|   pip3 install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.18/ -r /requirements.txt && \ | ||||
|   python3 -m venv /lsiopy  && \ | ||||
|   pip install -U pip wheel setuptools && \ | ||||
|   pip install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.21/ -r /requirements.txt && \ | ||||
|   apk del --purge \ | ||||
|     build-dependencies | ||||
|   | ||||
							
								
								
									
										19
									
								
								.github/workflows/containers.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										19
									
								
								.github/workflows/containers.yml
									
									
									
									
										vendored
									
									
								
							| @@ -103,6 +103,19 @@ jobs: | ||||
| #          provenance: false | ||||
|  | ||||
|       # A new tagged release is required, which builds :tag and :latest | ||||
|       - name: Docker meta :tag | ||||
|         if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.') | ||||
|         uses: docker/metadata-action@v5 | ||||
|         id: meta | ||||
|         with: | ||||
|             images: | | ||||
|                 ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io | ||||
|                 ghcr.io/dgtlmoon/changedetection.io | ||||
|             tags: | | ||||
|                 type=semver,pattern={{version}} | ||||
|                 type=semver,pattern={{major}}.{{minor}} | ||||
|                 type=semver,pattern={{major}} | ||||
|  | ||||
|       - name: Build and push :tag | ||||
|         id: docker_build_tag_release | ||||
|         if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.') | ||||
| @@ -111,11 +124,7 @@ jobs: | ||||
|           context: ./ | ||||
|           file: ./Dockerfile | ||||
|           push: true | ||||
|           tags: | | ||||
|             ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:${{ github.event.release.tag_name }} | ||||
|             ghcr.io/dgtlmoon/changedetection.io:${{ github.event.release.tag_name }} | ||||
|             ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest | ||||
|             ghcr.io/dgtlmoon/changedetection.io:latest | ||||
|           tags: ${{ steps.meta.outputs.tags }} | ||||
|           platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8 | ||||
|           cache-from: type=gha | ||||
|           cache-to: type=gha,mode=max | ||||
|   | ||||
							
								
								
									
										1
									
								
								.github/workflows/pypi-release.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/pypi-release.yml
									
									
									
									
										vendored
									
									
								
							| @@ -45,7 +45,6 @@ jobs: | ||||
|     - name: Test that the basic pip built package runs without error | ||||
|       run: | | ||||
|         set -ex | ||||
|         sudo pip3 install --upgrade pip  | ||||
|         pip3 install dist/changedetection.io*.whl | ||||
|         changedetection.io -d /tmp -p 10000 & | ||||
|         sleep 3 | ||||
|   | ||||
							
								
								
									
										7
									
								
								.github/workflows/test-only.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										7
									
								
								.github/workflows/test-only.yml
									
									
									
									
										vendored
									
									
								
							| @@ -37,3 +37,10 @@ jobs: | ||||
|       python-version: '3.12' | ||||
|       skip-pypuppeteer: true | ||||
|  | ||||
|   test-application-3-13: | ||||
|     needs: lint-code | ||||
|     uses: ./.github/workflows/test-stack-reusable-workflow.yml | ||||
|     with: | ||||
|       python-version: '3.13' | ||||
|       skip-pypuppeteer: true | ||||
|        | ||||
|   | ||||
| @@ -64,14 +64,16 @@ jobs: | ||||
|           echo "Running processes in docker..." | ||||
|           docker ps | ||||
|  | ||||
|       - name: Test built container with Pytest (generally as requests/plaintext fetching) | ||||
|       - name: Run Unit Tests | ||||
|         run: | | ||||
|           # Unit tests | ||||
|           echo "run test with unittest" | ||||
|           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_notification_diff' | ||||
|           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_watch_model' | ||||
|           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_jinja2_security' | ||||
|            | ||||
|           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_semver' | ||||
|  | ||||
|       - name: Test built container with Pytest (generally as requests/plaintext fetching) | ||||
|         run: | | ||||
|           # All tests | ||||
|           echo "run test with pytest" | ||||
|           # The default pytest logger_level is TRACE | ||||
|   | ||||
							
								
								
									
										39
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										39
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -1,14 +1,29 @@ | ||||
| __pycache__ | ||||
| .idea | ||||
| *.pyc | ||||
| datastore/url-watches.json | ||||
| datastore/* | ||||
| __pycache__ | ||||
| .pytest_cache | ||||
| build | ||||
| dist | ||||
| venv | ||||
| test-datastore/* | ||||
| test-datastore | ||||
| # Byte-compiled / optimized / DLL files | ||||
| **/__pycache__ | ||||
| **/*.py[cod] | ||||
|  | ||||
| # Caches | ||||
| .mypy_cache/ | ||||
| .pytest_cache/ | ||||
| .ruff_cache/ | ||||
|  | ||||
| # Distribution / packaging | ||||
| build/ | ||||
| dist/ | ||||
| *.egg-info* | ||||
|  | ||||
| # Virtual environment | ||||
| .env | ||||
| .venv/ | ||||
| venv/ | ||||
|  | ||||
| # IDEs | ||||
| .idea | ||||
| .vscode/settings.json | ||||
|  | ||||
| # Datastore files | ||||
| datastore/ | ||||
| test-datastore/ | ||||
|  | ||||
| # Memory consumption log | ||||
| test-memory.log | ||||
|   | ||||
| @@ -4,7 +4,7 @@ In any commercial activity involving 'Hosting' (as defined herein), whether in p | ||||
|  | ||||
| # Commercial License Agreement | ||||
|  | ||||
| This Commercial License Agreement ("Agreement") is entered into by and between Mr Morresi (the original creator of this software) here-in ("Licensor") and (your company or personal name) _____________ ("Licensee"). This Agreement sets forth the terms and conditions under which Licensor provides its software ("Software") and services to Licensee for the purpose of reselling the software either in part or full, as part of any commercial activity where the activity involves a third party. | ||||
| This Commercial License Agreement ("Agreement") is entered into by and between Web Technologies s.r.o. here-in ("Licensor") and (your company or personal name) _____________ ("Licensee"). This Agreement sets forth the terms and conditions under which Licensor provides its software ("Software") and services to Licensee for the purpose of reselling the software either in part or full, as part of any commercial activity where the activity involves a third party. | ||||
|  | ||||
| ### Definition of Hosting | ||||
|  | ||||
|   | ||||
| @@ -32,11 +32,12 @@ RUN pip install --extra-index-url https://www.piwheels.org/simple  --target=/dep | ||||
| # Playwright is an alternative to Selenium | ||||
| # Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing | ||||
| # https://github.com/dgtlmoon/changedetection.io/pull/1067 also musl/alpine (not supported) | ||||
| RUN pip install --target=/dependencies playwright~=1.41.2 \ | ||||
| RUN pip install --target=/dependencies playwright~=1.48.0 \ | ||||
|     || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled." | ||||
|  | ||||
| # Final image stage | ||||
| FROM python:${PYTHON_VERSION}-slim-bookworm | ||||
| LABEL org.opencontainers.image.source="https://github.com/dgtlmoon/changedetection.io" | ||||
|  | ||||
| RUN apt-get update && apt-get install -y --no-install-recommends \ | ||||
|     libxslt1.1 \ | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| recursive-include changedetectionio/api * | ||||
| recursive-include changedetectionio/apprise_plugin * | ||||
| recursive-include changedetectionio/blueprint * | ||||
| recursive-include changedetectionio/content_fetchers * | ||||
| recursive-include changedetectionio/model * | ||||
|   | ||||
							
								
								
									
										11
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										11
									
								
								README.md
									
									
									
									
									
								
							| @@ -105,13 +105,22 @@ We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) glob | ||||
|  | ||||
| Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/ | ||||
|  | ||||
| ### Schedule web page watches in any timezone, limit by day of week and time. | ||||
|  | ||||
| Easily set a re-check schedule, for example you could limit the web page change detection to only operate during business hours. | ||||
| Or perhaps based on a foreign timezone (for example, you want to check for the latest news-headlines in a foreign country at 0900 AM), | ||||
|  | ||||
| <img src="./docs/scheduler.png" style="max-width:80%;" alt="How to monitor web page changes according to a schedule"  title="How to monitor web page changes according to a schedule"  /> | ||||
|  | ||||
| Includes quick short-cut buttons to setup a schedule for **business hours only**, or **weekends**. | ||||
|  | ||||
| ### We have a Chrome extension! | ||||
|  | ||||
| Easily add the current web page to your changedetection.io tool, simply install the extension and click "Sync" to connect it to your existing changedetection.io install. | ||||
|  | ||||
| [<img src="./docs/chrome-extension-screenshot.png" style="max-width:80%;" alt="Chrome Extension to easily add the current web-page to detect a change."  title="Chrome Extension to easily add the current web-page to detect a change."  />](https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop) | ||||
|  | ||||
| [Goto the Chrome Webstore to download the extension.](https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop) | ||||
| [Goto the Chrome Webstore to download the extension.](https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop) ( Or check out the [GitHub repo](https://github.com/dgtlmoon/changedetection.io-browser-extension) )  | ||||
|  | ||||
| ## Installation | ||||
|  | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
| # Read more https://github.com/dgtlmoon/changedetection.io/wiki | ||||
|  | ||||
| __version__ = '0.46.04' | ||||
| __version__ = '0.49.1' | ||||
|  | ||||
| from changedetectionio.strtobool import strtobool | ||||
| from json.decoder import JSONDecodeError | ||||
| @@ -24,6 +24,9 @@ from loguru import logger | ||||
| app = None | ||||
| datastore = None | ||||
|  | ||||
| def get_version(): | ||||
|     return __version__ | ||||
|  | ||||
| # Parent wrapper or OS sends us a SIGTERM/SIGINT, do everything required for a clean shutdown | ||||
| def sigshutdown_handler(_signo, _stack_frame): | ||||
|     global app | ||||
| @@ -160,11 +163,10 @@ def main(): | ||||
|                     ) | ||||
|  | ||||
|     # Monitored websites will not receive a Referer header when a user clicks on an outgoing link. | ||||
|     # @Note: Incompatible with password login (and maybe other features) for now, submit a PR! | ||||
|     @app.after_request | ||||
|     def hide_referrer(response): | ||||
|         if strtobool(os.getenv("HIDE_REFERER", 'false')): | ||||
|             response.headers["Referrer-Policy"] = "no-referrer" | ||||
|             response.headers["Referrer-Policy"] = "same-origin" | ||||
|  | ||||
|         return response | ||||
|  | ||||
|   | ||||
| @@ -112,6 +112,35 @@ def build_watch_json_schema(d): | ||||
|  | ||||
|     schema['properties']['time_between_check'] = build_time_between_check_json_schema() | ||||
|  | ||||
|     schema['properties']['browser_steps'] = { | ||||
|         "anyOf": [ | ||||
|             { | ||||
|                 "type": "array", | ||||
|                 "items": { | ||||
|                     "type": "object", | ||||
|                     "properties": { | ||||
|                         "operation": { | ||||
|                             "type": ["string", "null"], | ||||
|                             "maxLength": 5000  # Allows null and any string up to 5000 chars (including "") | ||||
|                         }, | ||||
|                         "selector": { | ||||
|                             "type": ["string", "null"], | ||||
|                             "maxLength": 5000 | ||||
|                         }, | ||||
|                         "optional_value": { | ||||
|                             "type": ["string", "null"], | ||||
|                             "maxLength": 5000 | ||||
|                         } | ||||
|                     }, | ||||
|                     "required": ["operation", "selector", "optional_value"], | ||||
|                     "additionalProperties": False  # No extra keys allowed | ||||
|                 } | ||||
|             }, | ||||
|             {"type": "null"},  # Allows null for `browser_steps` | ||||
|             {"type": "array", "maxItems": 0}  # Allows empty array [] | ||||
|         ] | ||||
|     } | ||||
|  | ||||
|     # headers ? | ||||
|     return schema | ||||
|  | ||||
|   | ||||
| @@ -58,7 +58,7 @@ class Watch(Resource): | ||||
|             abort(404, message='No watch exists with the UUID of {}'.format(uuid)) | ||||
|  | ||||
|         if request.args.get('recheck'): | ||||
|             self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True})) | ||||
|             self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|             return "OK", 200 | ||||
|         if request.args.get('paused', '') == 'paused': | ||||
|             self.datastore.data['watching'].get(uuid).pause() | ||||
| @@ -76,6 +76,7 @@ class Watch(Resource): | ||||
|         # Return without history, get that via another API call | ||||
|         # Properties are not returned as a JSON, so add the required props manually | ||||
|         watch['history_n'] = watch.history_n | ||||
|         # attr .last_changed will check for the last written text snapshot on change | ||||
|         watch['last_changed'] = watch.last_changed | ||||
|         watch['viewed'] = watch.viewed | ||||
|         return watch | ||||
| @@ -246,7 +247,7 @@ class CreateWatch(Resource): | ||||
|  | ||||
|         new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags) | ||||
|         if new_uuid: | ||||
|             self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True})) | ||||
|             self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid})) | ||||
|             return {'uuid': new_uuid}, 201 | ||||
|         else: | ||||
|             return "Invalid or unsupported URL", 400 | ||||
| @@ -303,7 +304,7 @@ class CreateWatch(Resource): | ||||
|  | ||||
|         if request.args.get('recheck_all'): | ||||
|             for uuid in self.datastore.data['watching'].keys(): | ||||
|                 self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True})) | ||||
|                 self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|             return {'status': "OK"}, 200 | ||||
|  | ||||
|         return list, 200 | ||||
|   | ||||
| @@ -1,3 +1,4 @@ | ||||
| from changedetectionio import apprise_plugin | ||||
| import apprise | ||||
|  | ||||
| # Create our AppriseAsset and populate it with some of our new values: | ||||
|   | ||||
| @@ -1,5 +1,8 @@ | ||||
| # include the decorator | ||||
| from apprise.decorators import notify | ||||
| from loguru import logger | ||||
| from requests.structures import CaseInsensitiveDict | ||||
|  | ||||
|  | ||||
| @notify(on="delete") | ||||
| @notify(on="deletes") | ||||
| @@ -12,67 +15,84 @@ from apprise.decorators import notify | ||||
| def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs): | ||||
|     import requests | ||||
|     import json | ||||
|     from apprise.utils import parse_url as apprise_parse_url | ||||
|     from apprise import URLBase | ||||
|     import re | ||||
|  | ||||
|     from urllib.parse import unquote_plus | ||||
|     from apprise.utils.parse import parse_url as apprise_parse_url | ||||
|  | ||||
|     url = kwargs['meta'].get('url') | ||||
|     schema = kwargs['meta'].get('schema').lower().strip() | ||||
|  | ||||
|     if url.startswith('post'): | ||||
|         r = requests.post | ||||
|     elif url.startswith('get'): | ||||
|         r = requests.get | ||||
|     elif url.startswith('put'): | ||||
|         r = requests.put | ||||
|     elif url.startswith('delete'): | ||||
|         r = requests.delete | ||||
|     # Choose POST, GET etc from requests | ||||
|     method =  re.sub(rf's$', '', schema) | ||||
|     requests_method = getattr(requests, method) | ||||
|  | ||||
|     url = url.replace('post://', 'http://') | ||||
|     url = url.replace('posts://', 'https://') | ||||
|     url = url.replace('put://', 'http://') | ||||
|     url = url.replace('puts://', 'https://') | ||||
|     url = url.replace('get://', 'http://') | ||||
|     url = url.replace('gets://', 'https://') | ||||
|     url = url.replace('put://', 'http://') | ||||
|     url = url.replace('puts://', 'https://') | ||||
|     url = url.replace('delete://', 'http://') | ||||
|     url = url.replace('deletes://', 'https://') | ||||
|  | ||||
|     headers = {} | ||||
|     params = {} | ||||
|     params = CaseInsensitiveDict({}) # Added to requests | ||||
|     auth = None | ||||
|     has_error = False | ||||
|  | ||||
|     # Convert /foobar?+some-header=hello to proper header dictionary | ||||
|     results = apprise_parse_url(url) | ||||
|     if results: | ||||
|         # Add our headers that the user can potentially over-ride if they wish | ||||
|         # to to our returned result set and tidy entries by unquoting them | ||||
|         headers = {URLBase.unquote(x): URLBase.unquote(y) | ||||
|                    for x, y in results['qsd+'].items()} | ||||
|  | ||||
|         # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation | ||||
|         # In Apprise, it relies on prefixing each request arg with "-", because it uses say &method=update as a flag for apprise | ||||
|         # but here we are making straight requests, so we need todo convert this against apprise's logic | ||||
|         for k, v in results['qsd'].items(): | ||||
|             if not k.strip('+-') in results['qsd+'].keys(): | ||||
|                 params[URLBase.unquote(k)] = URLBase.unquote(v) | ||||
|     # Add our headers that the user can potentially over-ride if they wish | ||||
|     # to to our returned result set and tidy entries by unquoting them | ||||
|     headers = CaseInsensitiveDict({unquote_plus(x): unquote_plus(y) | ||||
|                for x, y in results['qsd+'].items()}) | ||||
|  | ||||
|         # Determine Authentication | ||||
|         auth = '' | ||||
|         if results.get('user') and results.get('password'): | ||||
|             auth = (URLBase.unquote(results.get('user')), URLBase.unquote(results.get('user'))) | ||||
|         elif results.get('user'): | ||||
|             auth = (URLBase.unquote(results.get('user'))) | ||||
|     # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation | ||||
|     # In Apprise, it relies on prefixing each request arg with "-", because it uses say &method=update as a flag for apprise | ||||
|     # but here we are making straight requests, so we need todo convert this against apprise's logic | ||||
|     for k, v in results['qsd'].items(): | ||||
|         if not k.strip('+-') in results['qsd+'].keys(): | ||||
|             params[unquote_plus(k)] = unquote_plus(v) | ||||
|  | ||||
|     # Try to auto-guess if it's JSON | ||||
|     # Determine Authentication | ||||
|     auth = '' | ||||
|     if results.get('user') and results.get('password'): | ||||
|         auth = (unquote_plus(results.get('user')), unquote_plus(results.get('user'))) | ||||
|     elif results.get('user'): | ||||
|         auth = (unquote_plus(results.get('user'))) | ||||
|  | ||||
|     # If it smells like it could be JSON and no content-type was already set, offer a default content type. | ||||
|     if body and '{' in body[:100] and not headers.get('Content-Type'): | ||||
|         json_header = 'application/json; charset=utf-8' | ||||
|         try: | ||||
|             # Try if it's JSON | ||||
|             json.loads(body) | ||||
|             headers['Content-Type'] = json_header | ||||
|         except ValueError as e: | ||||
|             logger.warning(f"Could not automatically add '{json_header}' header to the notification because the document failed to parse as JSON: {e}") | ||||
|             pass | ||||
|  | ||||
|     # POSTS -> HTTPS etc | ||||
|     if schema.lower().endswith('s'): | ||||
|         url = re.sub(rf'^{schema}', 'https', results.get('url')) | ||||
|     else: | ||||
|         url = re.sub(rf'^{schema}', 'http', results.get('url')) | ||||
|  | ||||
|     status_str = '' | ||||
|     try: | ||||
|         json.loads(body) | ||||
|         headers['Content-Type'] = 'application/json; charset=utf-8' | ||||
|     except ValueError as e: | ||||
|         pass | ||||
|         r = requests_method(url, | ||||
|           auth=auth, | ||||
|           data=body.encode('utf-8') if type(body) is str else body, | ||||
|           headers=headers, | ||||
|           params=params | ||||
|         ) | ||||
|  | ||||
|     r(results.get('url'), | ||||
|       auth=auth, | ||||
|       data=body.encode('utf-8') if type(body) is str else body, | ||||
|       headers=headers, | ||||
|       params=params | ||||
|       ) | ||||
|         if not (200 <= r.status_code < 300): | ||||
|             status_str = f"Error sending '{method.upper()}' request to {url} - Status: {r.status_code}: '{r.reason}'" | ||||
|             logger.error(status_str) | ||||
|             has_error = True | ||||
|         else: | ||||
|             logger.info(f"Sent '{method.upper()}' request to {url}") | ||||
|             has_error = False | ||||
|  | ||||
|     except requests.RequestException as e: | ||||
|         status_str = f"Error sending '{method.upper()}' request to {url} - {str(e)}" | ||||
|         logger.error(status_str) | ||||
|         has_error = True | ||||
|  | ||||
|     if has_error: | ||||
|         raise TypeError(status_str) | ||||
|  | ||||
|     return True | ||||
|   | ||||
							
								
								
									
										164
									
								
								changedetectionio/blueprint/backups/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										164
									
								
								changedetectionio/blueprint/backups/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,164 @@ | ||||
| import datetime | ||||
| import glob | ||||
| import threading | ||||
|  | ||||
| from flask import Blueprint, render_template, send_from_directory, flash, url_for, redirect, abort | ||||
| import os | ||||
|  | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio.flask_app import login_optionally_required | ||||
| from loguru import logger | ||||
|  | ||||
| BACKUP_FILENAME_FORMAT = "changedetection-backup-{}.zip" | ||||
|  | ||||
|  | ||||
def create_backup(datastore_path, watches: dict):
    """Write a zip backup of the datastore into *datastore_path*.

    The archive contains the watch index (url-watches.json), the Flask
    secret, every file from each watch's data directory and two plain-text
    URL list files.  The zip is built under a ``.tmp`` name and only renamed
    to its final ``.zip`` name once complete, so a half-written backup never
    shows up as a finished one.

    :param datastore_path: Directory holding the datastore; the backup zip
        is written here as well.
    :param watches: Mapping of watch UUID -> watch object (dict-like, with
        a ``watch_data_dir`` attribute).
    """
    logger.debug("Creating backup...")
    import zipfile
    from pathlib import Path

    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    backupname = BACKUP_FILENAME_FORMAT.format(timestamp)
    backup_filepath = os.path.join(datastore_path, backupname)
    # Build under a temporary name so an in-progress backup is never picked
    # up by the "changedetection-backup-*.zip" glob used elsewhere.
    tmp_filepath = backup_filepath.replace('.zip', '.tmp')

    with zipfile.ZipFile(tmp_filepath, "w",
                         compression=zipfile.ZIP_DEFLATED,
                         compresslevel=8) as zipObj:

        # Add the index
        zipObj.write(os.path.join(datastore_path, "url-watches.json"), arcname="url-watches.json")

        # Add the flask app secret
        zipObj.write(os.path.join(datastore_path, "secret.txt"), arcname="secret.txt")

        # Add any data in each watch's data directory.
        for watch in watches.values():
            for f in Path(watch.watch_data_dir).glob('*'):
                zipObj.write(f,
                             # Use the full path to access the file, but make the file 'relative' in the Zip.
                             arcname=os.path.join(f.parts[-2], f.parts[-1]),
                             compress_type=zipfile.ZIP_DEFLATED,
                             compresslevel=8)

        # Create a list file with just the URLs, so it's easier to port somewhere else in the future
        list_file = "url-list.txt"
        with open(os.path.join(datastore_path, list_file), "w") as f:
            for watch in watches.values():
                f.write("{}\r\n".format(watch["url"]))

        # Same list, but with each watch's tags appended
        list_with_tags_file = "url-list-with-tags.txt"
        with open(os.path.join(datastore_path, list_with_tags_file), "w") as f:
            for watch in watches.values():
                f.write("{} {}\r\n".format(watch.get('url'), watch.get('tags', {})))

        # Add both list files to the Zip
        for list_filename in (list_file, list_with_tags_file):
            zipObj.write(
                os.path.join(datastore_path, list_filename),
                arcname=list_filename,
                compress_type=zipfile.ZIP_DEFLATED,
                compresslevel=8,
            )

    # Now it's done, rename it so it shows up finally and its completed being written.
    os.rename(tmp_filepath, backup_filepath)
|  | ||||
|  | ||||
def construct_blueprint(datastore: ChangeDetectionStore):
    """Build the '/backups' blueprint: list, request, download and delete
    zip backups of the datastore.

    :param datastore: The application's ChangeDetectionStore.
    :return: A configured Flask Blueprint.
    """
    backups_blueprint = Blueprint('backups', __name__, template_folder="templates")

    # Threads that are (or were) building a backup - consulted so that only
    # one backup runs at a time.  NOTE(review): finished threads are never
    # pruned; any(is_alive) stays correct but the list grows per request.
    backup_threads = []

    def find_backups():
        """Return metadata for each completed backup zip, newest first."""
        backup_filepath = os.path.join(datastore.datastore_path, BACKUP_FILENAME_FORMAT.format("*"))
        backup_info = []

        for backup in glob.glob(backup_filepath):
            size = os.path.getsize(backup) / (1024 * 1024)
            backup_info.append({
                'filename': os.path.basename(backup),
                'filesize': f"{size:.2f}",  # megabytes, pre-formatted for display
                'creation_time': os.path.getctime(backup),
            })

        backup_info.sort(key=lambda x: x['creation_time'], reverse=True)
        return backup_info

    # NOTE: @route must be the outermost (first) decorator.  Flask's route
    # decorator registers exactly the function it receives, so with
    # @login_optionally_required listed above it the *unwrapped* view is
    # what gets served and the login check is silently bypassed.
    @backups_blueprint.route("/request-backup", methods=['GET'])
    @login_optionally_required
    def request_backup():
        """Kick off a background thread that builds a new backup."""
        if any(thread.is_alive() for thread in backup_threads):
            flash("A backup is already running, check back in a few minutes", "error")
            return redirect(url_for('backups.index'))

        if len(find_backups()) > int(os.getenv("MAX_NUMBER_BACKUPS", 100)):
            flash("Maximum number of backups reached, please remove some", "error")
            return redirect(url_for('backups.index'))

        # Be sure we're written fresh before snapshotting
        datastore.sync_to_json()
        zip_thread = threading.Thread(target=create_backup,
                                      args=(datastore.datastore_path, datastore.data.get("watching")))
        zip_thread.start()
        backup_threads.append(zip_thread)
        flash("Backup building in background, check back in a few minutes.")

        return redirect(url_for('backups.index'))

    @backups_blueprint.route("/download/<string:filename>", methods=['GET'])
    @login_optionally_required
    def download_backup(filename):
        """Serve a single backup zip; 'latest' resolves to the newest one."""
        import re
        filename = filename.strip()

        # Resolve 'latest' *before* validating so the real filename is the
        # one checked; 404 when no backups exist (previously an IndexError).
        if filename == 'latest':
            backups = find_backups()
            if not backups:
                abort(404)
            filename = backups[0]['filename']

        # Only our own generated backup names may be fetched.  Raw string:
        # "\d" in a plain literal is an invalid escape sequence.
        backup_filename_regex = BACKUP_FILENAME_FORMAT.format(r"\d+")
        if not re.match(r"^" + backup_filename_regex + r"$", filename):
            abort(400)  # Bad Request if the filename doesn't match the pattern

        datastore_dir = os.path.abspath(datastore.datastore_path)
        full_path = os.path.join(datastore_dir, filename)
        # Belt-and-braces traversal guard (send_from_directory checks too).
        if not full_path.startswith(datastore_dir):
            abort(404)

        logger.debug(f"Backup download request for '{full_path}'")
        return send_from_directory(datastore_dir, filename, as_attachment=True)

    @backups_blueprint.route("/", methods=['GET'])
    @login_optionally_required
    def index():
        """Overview page listing the available backups."""
        backups = find_backups()
        return render_template("overview.html",
                               available_backups=backups,
                               backup_running=any(thread.is_alive() for thread in backup_threads)
                               )

    @backups_blueprint.route("/remove-backups", methods=['GET'])
    @login_optionally_required
    def remove_backups():
        """Delete every completed backup zip from the datastore directory."""
        backup_filepath = os.path.join(datastore.datastore_path, BACKUP_FILENAME_FORMAT.format("*"))
        for backup in glob.glob(backup_filepath):
            os.unlink(backup)

        flash("Backups were deleted.")

        return redirect(url_for('backups.index'))

    return backups_blueprint
							
								
								
									
										36
									
								
								changedetectionio/blueprint/backups/templates/overview.html
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										36
									
								
								changedetectionio/blueprint/backups/templates/overview.html
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,36 @@ | ||||
{% extends 'base.html' %}
{% block content %}
    {% from '_helpers.html' import render_simple_field, render_field %}
    <div class="edit-form">
        <div class="box-wrap inner">
            <h4>Backups</h4>
            {% if backup_running %}
                <p>
                    <strong>A backup is running!</strong>
                </p>
            {% endif %}
            <p>
                Here you can download and request a new backup, when a backup is completed you will see it listed below.
            </p>
            <br>
                {% if available_backups %}
                    <ul>
                    {# 'filesize' is pre-formatted in megabytes by find_backups(), hence the "MB" (not "Mb") label #}
                    {% for backup in available_backups %}
                        <li><a href="{{ url_for('backups.download_backup', filename=backup["filename"]) }}">{{ backup["filename"] }}</a> {{ backup["filesize"] }} MB</li>
                    {% endfor %}
                    </ul>
                {% else %}
                    <p>
                    <strong>No backups found.</strong>
                    </p>
                {% endif %}

            <a class="pure-button pure-button-primary" href="{{ url_for('backups.request_backup') }}">Create backup</a>
            {% if available_backups %}
                <a class="pure-button button-small button-error " href="{{ url_for('backups.remove_backups') }}">Remove backups</a>
            {% endif %}
        </div>
    </div>


{% endblock %}
| @@ -52,6 +52,8 @@ class steppable_browser_interface(): | ||||
|     page = None | ||||
|     start_url = None | ||||
|  | ||||
|     action_timeout = 10 * 1000 | ||||
|  | ||||
|     def __init__(self, start_url): | ||||
|         self.start_url = start_url | ||||
|  | ||||
| @@ -102,7 +104,7 @@ class steppable_browser_interface(): | ||||
|             return | ||||
|         elem = self.page.get_by_text(value) | ||||
|         if elem.count(): | ||||
|             elem.first.click(delay=randint(200, 500), timeout=3000) | ||||
|             elem.first.click(delay=randint(200, 500), timeout=self.action_timeout) | ||||
|  | ||||
|     def action_click_element_containing_text_if_exists(self, selector=None, value=''): | ||||
|         logger.debug("Clicking element containing text if exists") | ||||
| @@ -111,7 +113,7 @@ class steppable_browser_interface(): | ||||
|         elem = self.page.get_by_text(value) | ||||
|         logger.debug(f"Clicking element containing text - {elem.count()} elements found") | ||||
|         if elem.count(): | ||||
|             elem.first.click(delay=randint(200, 500), timeout=3000) | ||||
|             elem.first.click(delay=randint(200, 500), timeout=self.action_timeout) | ||||
|         else: | ||||
|             return | ||||
|  | ||||
| @@ -119,7 +121,7 @@ class steppable_browser_interface(): | ||||
|         if not len(selector.strip()): | ||||
|             return | ||||
|  | ||||
|         self.page.fill(selector, value, timeout=10 * 1000) | ||||
|         self.page.fill(selector, value, timeout=self.action_timeout) | ||||
|  | ||||
|     def action_execute_js(self, selector, value): | ||||
|         response = self.page.evaluate(value) | ||||
| @@ -130,7 +132,7 @@ class steppable_browser_interface(): | ||||
|         if not len(selector.strip()): | ||||
|             return | ||||
|  | ||||
|         self.page.click(selector=selector, timeout=30 * 1000, delay=randint(200, 500)) | ||||
|         self.page.click(selector=selector, timeout=self.action_timeout + 20 * 1000, delay=randint(200, 500)) | ||||
|  | ||||
|     def action_click_element_if_exists(self, selector, value): | ||||
|         import playwright._impl._errors as _api_types | ||||
| @@ -138,7 +140,7 @@ class steppable_browser_interface(): | ||||
|         if not len(selector.strip()): | ||||
|             return | ||||
|         try: | ||||
|             self.page.click(selector, timeout=10 * 1000, delay=randint(200, 500)) | ||||
|             self.page.click(selector, timeout=self.action_timeout, delay=randint(200, 500)) | ||||
|         except _api_types.TimeoutError as e: | ||||
|             return | ||||
|         except _api_types.Error as e: | ||||
| @@ -185,10 +187,10 @@ class steppable_browser_interface(): | ||||
|         self.page.keyboard.press("PageDown", delay=randint(200, 500)) | ||||
|  | ||||
|     def action_check_checkbox(self, selector, value): | ||||
|         self.page.locator(selector).check(timeout=1000) | ||||
|         self.page.locator(selector).check(timeout=self.action_timeout) | ||||
|  | ||||
|     def action_uncheck_checkbox(self, selector, value): | ||||
|         self.page.locator(selector, timeout=1000).uncheck(timeout=1000) | ||||
|         self.page.locator(selector).uncheck(timeout=self.action_timeout) | ||||
|  | ||||
|  | ||||
| # Responsible for maintaining a live 'context' with the chrome CDP | ||||
|   | ||||
| @@ -1,4 +1,7 @@ | ||||
| import importlib | ||||
| from concurrent.futures import ThreadPoolExecutor | ||||
|  | ||||
| from changedetectionio.processors.text_json_diff.processor import FilterNotFoundInResponse | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
|  | ||||
| from functools import wraps | ||||
| @@ -30,7 +33,6 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|     def long_task(uuid, preferred_proxy): | ||||
|         import time | ||||
|         from changedetectionio.content_fetchers import exceptions as content_fetcher_exceptions | ||||
|         from changedetectionio.processors.text_json_diff import text_json_diff | ||||
|         from changedetectionio.safe_jinja import render as jinja_render | ||||
|  | ||||
|         status = {'status': '', 'length': 0, 'text': ''} | ||||
| @@ -38,8 +40,12 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|         contents = '' | ||||
|         now = time.time() | ||||
|         try: | ||||
|             update_handler = text_json_diff.perform_site_check(datastore=datastore, watch_uuid=uuid) | ||||
|             update_handler.call_browser() | ||||
|             processor_module = importlib.import_module("changedetectionio.processors.text_json_diff.processor") | ||||
|             update_handler = processor_module.perform_site_check(datastore=datastore, | ||||
|                                                                  watch_uuid=uuid | ||||
|                                                                  ) | ||||
|  | ||||
|             update_handler.call_browser(preferred_proxy_id=preferred_proxy) | ||||
|         # title, size is len contents not len xfer | ||||
|         except content_fetcher_exceptions.Non200ErrorCodeReceived as e: | ||||
|             if e.status_code == 404: | ||||
| @@ -48,7 +54,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|                 status.update({'status': 'ERROR', 'length': len(contents), 'text': f"{e.status_code} - Access denied"}) | ||||
|             else: | ||||
|                 status.update({'status': 'ERROR', 'length': len(contents), 'text': f"Status code: {e.status_code}"}) | ||||
|         except text_json_diff.FilterNotFoundInResponse: | ||||
|         except FilterNotFoundInResponse: | ||||
|             status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but CSS/xPath filter not found (page changed layout?)"}) | ||||
|         except content_fetcher_exceptions.EmptyReply as e: | ||||
|             if e.status_code == 403 or e.status_code == 401: | ||||
|   | ||||
| @@ -19,7 +19,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q: PriorityQueue | ||||
|         datastore.data['watching'][uuid]['track_ldjson_price_data'] = PRICE_DATA_TRACK_ACCEPT | ||||
|         datastore.data['watching'][uuid]['processor'] = 'restock_diff' | ||||
|         datastore.data['watching'][uuid].clear_watch() | ||||
|         update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False})) | ||||
|         update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|         return redirect(url_for("index")) | ||||
|  | ||||
|     @login_required | ||||
|   | ||||
| @@ -13,6 +13,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|     def tags_overview_page(): | ||||
|         from .form import SingleTag | ||||
|         add_form = SingleTag(request.form) | ||||
|  | ||||
|         sorted_tags = sorted(datastore.data['settings']['application'].get('tags').items(), key=lambda x: x[1]['title']) | ||||
|  | ||||
|         from collections import Counter | ||||
| @@ -104,9 +105,11 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|  | ||||
|         default = datastore.data['settings']['application']['tags'].get(uuid) | ||||
|  | ||||
|         form = group_restock_settings_form(formdata=request.form if request.method == 'POST' else None, | ||||
|         form = group_restock_settings_form( | ||||
|                                        formdata=request.form if request.method == 'POST' else None, | ||||
|                                        data=default, | ||||
|                                        extra_notification_tokens=datastore.get_unique_notification_tokens_available() | ||||
|                                        extra_notification_tokens=datastore.get_unique_notification_tokens_available(), | ||||
|                                        default_system_settings = datastore.data['settings'], | ||||
|                                        ) | ||||
|  | ||||
|         template_args = { | ||||
|   | ||||
| @@ -17,7 +17,6 @@ | ||||
| </script> | ||||
|  | ||||
| <script src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script> | ||||
| <!--<script src="{{url_for('static_content', group='js', filename='limit.js')}}" defer></script>--> | ||||
| <script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script> | ||||
|  | ||||
| <div class="edit-form monospaced-textarea"> | ||||
|   | ||||
| @@ -4,7 +4,9 @@ from loguru import logger | ||||
| from changedetectionio.content_fetchers.exceptions import BrowserStepsStepException | ||||
| import os | ||||
|  | ||||
| visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4,header,footer,section,article,aside,details,main,nav,section,summary' | ||||
| # Visual Selector scraper - 'Button' is there because some sites have <button>OUT OF STOCK</button>. | ||||
| visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4,header,footer,section,article,aside,details,main,nav,section,summary,button' | ||||
|  | ||||
|  | ||||
| # available_fetchers() will scan this implementation looking for anything starting with html_ | ||||
| # this information is used in the form selections | ||||
|   | ||||
| @@ -75,6 +75,7 @@ class fetcher(Fetcher): | ||||
|         self.headers = r.headers | ||||
|  | ||||
|         if not r.content or not len(r.content): | ||||
|             logger.debug(f"Requests returned empty content for '{url}'") | ||||
|             if not empty_pages_are_a_change: | ||||
|                 raise EmptyReply(url=url, status_code=r.status_code) | ||||
|             else: | ||||
|   | ||||
| @@ -30,6 +30,8 @@ function isItemInStock() { | ||||
|         'dieser artikel ist bald wieder verfügbar', | ||||
|         'dostępne wkrótce', | ||||
|         'en rupture de stock', | ||||
|         'esgotado', | ||||
|         'indisponível', | ||||
|         'isn\'t in stock right now', | ||||
|         'isnt in stock right now', | ||||
|         'isn’t in stock right now', | ||||
| @@ -37,6 +39,7 @@ function isItemInStock() { | ||||
|         'let me know when it\'s available', | ||||
|         'mail me when available', | ||||
|         'message if back in stock', | ||||
|         'mevcut değil', | ||||
|         'nachricht bei', | ||||
|         'nicht auf lager', | ||||
|         'nicht lagernd', | ||||
| @@ -48,7 +51,8 @@ function isItemInStock() { | ||||
|         'niet beschikbaar', | ||||
|         'niet leverbaar', | ||||
|         'niet op voorraad', | ||||
|         'no disponible temporalmente', | ||||
|         'no disponible', | ||||
|         'non disponibile', | ||||
|         'no longer in stock', | ||||
|         'no tickets available', | ||||
|         'not available', | ||||
| @@ -57,6 +61,7 @@ function isItemInStock() { | ||||
|         'notify me when available', | ||||
|         'notify me', | ||||
|         'notify when available', | ||||
|         'não disponível', | ||||
|         'não estamos a aceitar encomendas', | ||||
|         'out of stock', | ||||
|         'out-of-stock', | ||||
| @@ -64,12 +69,14 @@ function isItemInStock() { | ||||
|         'produkt niedostępny', | ||||
|         'sold out', | ||||
|         'sold-out', | ||||
|         'stokta yok', | ||||
|         'temporarily out of stock', | ||||
|         'temporarily unavailable', | ||||
|         'there were no search results for', | ||||
|         'this item is currently unavailable', | ||||
|         'tickets unavailable', | ||||
|         'tijdelijk uitverkocht', | ||||
|         'tükendi', | ||||
|         'unavailable nearby', | ||||
|         'unavailable tickets', | ||||
|         'vergriffen', | ||||
| @@ -154,10 +161,14 @@ function isItemInStock() { | ||||
|         } | ||||
|  | ||||
|         elementText = ""; | ||||
|         if (element.tagName.toLowerCase() === "input") { | ||||
|             elementText = element.value.toLowerCase().trim(); | ||||
|         } else { | ||||
|             elementText = getElementBaseText(element); | ||||
|         try { | ||||
|             if (element.tagName.toLowerCase() === "input") { | ||||
|                 elementText = element.value.toLowerCase().trim(); | ||||
|             } else { | ||||
|                 elementText = getElementBaseText(element); | ||||
|             } | ||||
|         } catch (e) { | ||||
|             console.warn('stock-not-in-stock.js scraper - handling element for gettext failed', e); | ||||
|         } | ||||
|  | ||||
|         if (elementText.length) { | ||||
|   | ||||
| @@ -1,6 +1,9 @@ | ||||
| import difflib | ||||
| from typing import List, Iterator, Union | ||||
|  | ||||
| REMOVED_STYLE = "background-color: #fadad7; color: #b30000;" | ||||
| ADDED_STYLE = "background-color: #eaf2c2; color: #406619;" | ||||
|  | ||||
| def same_slicer(lst: List[str], start: int, end: int) -> List[str]: | ||||
|     """Return a slice of the list, or a single element if start == end.""" | ||||
|     return lst[start:end] if start != end else [lst[start]] | ||||
| @@ -12,11 +15,12 @@ def customSequenceMatcher( | ||||
|     include_removed: bool = True, | ||||
|     include_added: bool = True, | ||||
|     include_replaced: bool = True, | ||||
|     include_change_type_prefix: bool = True | ||||
|     include_change_type_prefix: bool = True, | ||||
|     html_colour: bool = False | ||||
| ) -> Iterator[List[str]]: | ||||
|     """ | ||||
|     Compare two sequences and yield differences based on specified parameters. | ||||
|      | ||||
|  | ||||
|     Args: | ||||
|         before (List[str]): Original sequence | ||||
|         after (List[str]): Modified sequence | ||||
| @@ -25,26 +29,35 @@ def customSequenceMatcher( | ||||
|         include_added (bool): Include added parts | ||||
|         include_replaced (bool): Include replaced parts | ||||
|         include_change_type_prefix (bool): Add prefixes to indicate change types | ||||
|      | ||||
|         html_colour (bool): Use HTML background colors for differences | ||||
|  | ||||
|     Yields: | ||||
|         List[str]: Differences between sequences | ||||
|     """ | ||||
|     cruncher = difflib.SequenceMatcher(isjunk=lambda x: x in " \t", a=before, b=after) | ||||
|      | ||||
|  | ||||
|  | ||||
|  | ||||
|     for tag, alo, ahi, blo, bhi in cruncher.get_opcodes(): | ||||
|         if include_equal and tag == 'equal': | ||||
|             yield before[alo:ahi] | ||||
|         elif include_removed and tag == 'delete': | ||||
|             prefix = "(removed) " if include_change_type_prefix else '' | ||||
|             yield [f"{prefix}{line}" for line in same_slicer(before, alo, ahi)] | ||||
|             if html_colour: | ||||
|                 yield [f'<span style="{REMOVED_STYLE}">{line}</span>' for line in same_slicer(before, alo, ahi)] | ||||
|             else: | ||||
|                 yield [f"(removed) {line}" for line in same_slicer(before, alo, ahi)] if include_change_type_prefix else same_slicer(before, alo, ahi) | ||||
|         elif include_replaced and tag == 'replace': | ||||
|             prefix_changed = "(changed) " if include_change_type_prefix else '' | ||||
|             prefix_into = "(into) " if include_change_type_prefix else '' | ||||
|             yield [f"{prefix_changed}{line}" for line in same_slicer(before, alo, ahi)] + \ | ||||
|                   [f"{prefix_into}{line}" for line in same_slicer(after, blo, bhi)] | ||||
|             if html_colour: | ||||
|                 yield [f'<span style="{REMOVED_STYLE}">{line}</span>' for line in same_slicer(before, alo, ahi)] + \ | ||||
|                       [f'<span style="{ADDED_STYLE}">{line}</span>' for line in same_slicer(after, blo, bhi)] | ||||
|             else: | ||||
|                 yield [f"(changed) {line}" for line in same_slicer(before, alo, ahi)] + \ | ||||
|                       [f"(into) {line}" for line in same_slicer(after, blo, bhi)] if include_change_type_prefix else same_slicer(before, alo, ahi) + same_slicer(after, blo, bhi) | ||||
|         elif include_added and tag == 'insert': | ||||
|             prefix = "(added) " if include_change_type_prefix else '' | ||||
|             yield [f"{prefix}{line}" for line in same_slicer(after, blo, bhi)] | ||||
|             if html_colour: | ||||
|                 yield [f'<span style="{ADDED_STYLE}">{line}</span>' for line in same_slicer(after, blo, bhi)] | ||||
|             else: | ||||
|                 yield [f"(added) {line}" for line in same_slicer(after, blo, bhi)] if include_change_type_prefix else same_slicer(after, blo, bhi) | ||||
|  | ||||
| def render_diff( | ||||
|     previous_version_file_contents: str, | ||||
| @@ -55,11 +68,12 @@ def render_diff( | ||||
|     include_replaced: bool = True, | ||||
|     line_feed_sep: str = "\n", | ||||
|     include_change_type_prefix: bool = True, | ||||
|     patch_format: bool = False | ||||
|     patch_format: bool = False, | ||||
|     html_colour: bool = False | ||||
| ) -> str: | ||||
|     """ | ||||
|     Render the difference between two file contents. | ||||
|      | ||||
|  | ||||
|     Args: | ||||
|         previous_version_file_contents (str): Original file contents | ||||
|         newest_version_file_contents (str): Modified file contents | ||||
| @@ -70,7 +84,8 @@ def render_diff( | ||||
|         line_feed_sep (str): Separator for lines in output | ||||
|         include_change_type_prefix (bool): Add prefixes to indicate change types | ||||
|         patch_format (bool): Use patch format for output | ||||
|      | ||||
|         html_colour (bool): Use HTML background colors for differences | ||||
|  | ||||
|     Returns: | ||||
|         str: Rendered difference | ||||
|     """ | ||||
| @@ -88,10 +103,11 @@ def render_diff( | ||||
|         include_removed=include_removed, | ||||
|         include_added=include_added, | ||||
|         include_replaced=include_replaced, | ||||
|         include_change_type_prefix=include_change_type_prefix | ||||
|         include_change_type_prefix=include_change_type_prefix, | ||||
|         html_colour=html_colour | ||||
|     ) | ||||
|  | ||||
|     def flatten(lst: List[Union[str, List[str]]]) -> str: | ||||
|         return line_feed_sep.join(flatten(x) if isinstance(x, list) else x for x in lst) | ||||
|  | ||||
|     return flatten(rendered_diff) | ||||
|     return flatten(rendered_diff) | ||||
| @@ -1,7 +1,7 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import datetime | ||||
| import importlib | ||||
| from zoneinfo import ZoneInfo | ||||
|  | ||||
| import flask_login | ||||
| import locale | ||||
| @@ -12,9 +12,7 @@ import threading | ||||
| import time | ||||
| import timeago | ||||
|  | ||||
| from .content_fetchers.exceptions import ReplyWithContentButNoText | ||||
| from .processors import find_processors, get_parent_module, get_custom_watch_obj_for_processor | ||||
| from .processors.text_json_diff.processor import FilterNotFoundInResponse | ||||
| from .safe_jinja import render as jinja_render | ||||
| from changedetectionio.strtobool import strtobool | ||||
| from copy import deepcopy | ||||
| @@ -45,6 +43,7 @@ from loguru import logger | ||||
| from changedetectionio import html_tools, __version__ | ||||
| from changedetectionio import queuedWatchMetaData | ||||
| from changedetectionio.api import api_v1 | ||||
| from .time_handler import is_within_schedule | ||||
|  | ||||
| datastore = None | ||||
|  | ||||
| @@ -56,6 +55,7 @@ extra_stylesheets = [] | ||||
|  | ||||
| update_q = queue.PriorityQueue() | ||||
| notification_q = queue.Queue() | ||||
| MAX_QUEUE_SIZE = 2000 | ||||
|  | ||||
| app = Flask(__name__, | ||||
|             static_url_path="", | ||||
| @@ -70,7 +70,6 @@ FlaskCompress(app) | ||||
|  | ||||
| # Stop browser caching of assets | ||||
| app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0 | ||||
|  | ||||
| app.config.exit = Event() | ||||
|  | ||||
| app.config['NEW_VERSION_AVAILABLE'] = False | ||||
| @@ -87,7 +86,7 @@ csrf = CSRFProtect() | ||||
| csrf.init_app(app) | ||||
| notification_debug_log=[] | ||||
|  | ||||
| # get locale ready | ||||
| # Locale for correct presentation of prices etc | ||||
| default_locale = locale.getdefaultlocale() | ||||
| logger.info(f"System locale default is {default_locale}") | ||||
| try: | ||||
| @@ -473,7 +472,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                     continue | ||||
|             if watch.get('last_error'): | ||||
|                 errored_count += 1 | ||||
|                  | ||||
|  | ||||
|             if search_q: | ||||
|                 if (watch.get('title') and search_q in watch.get('title').lower()) or search_q in watch.get('url', '').lower(): | ||||
|                     sorted_watches.append(watch) | ||||
| @@ -536,24 +535,32 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|     @login_optionally_required | ||||
|     def ajax_callback_send_notification_test(watch_uuid=None): | ||||
|  | ||||
|         # Watch_uuid could be unset in the case its used in tag editor, global setings | ||||
|         # Watch_uuid could be unset in the case it`s used in tag editor, global settings | ||||
|         import apprise | ||||
|         import random | ||||
|         from .apprise_asset import asset | ||||
|         apobj = apprise.Apprise(asset=asset) | ||||
|  | ||||
|         # so that the custom endpoints are registered | ||||
|         from changedetectionio.apprise_plugin import apprise_custom_api_call_wrapper | ||||
|         is_global_settings_form = request.args.get('mode', '') == 'global-settings' | ||||
|         is_group_settings_form = request.args.get('mode', '') == 'group-settings' | ||||
|  | ||||
|         # Use an existing random one on the global/main settings form | ||||
|         if not watch_uuid and (is_global_settings_form or is_group_settings_form): | ||||
|         if not watch_uuid and (is_global_settings_form or is_group_settings_form) \ | ||||
|                 and datastore.data.get('watching'): | ||||
|             logger.debug(f"Send test notification - Choosing random Watch {watch_uuid}") | ||||
|             watch_uuid = random.choice(list(datastore.data['watching'].keys())) | ||||
|  | ||||
|         if not watch_uuid: | ||||
|             return make_response("Error: You must have atleast one watch configured for 'test notification' to work", 400) | ||||
|  | ||||
|         watch = datastore.data['watching'].get(watch_uuid) | ||||
|  | ||||
|         notification_urls = request.form['notification_urls'].strip().splitlines() | ||||
|         notification_urls = None | ||||
|  | ||||
|         if request.form.get('notification_urls'): | ||||
|             notification_urls = request.form['notification_urls'].strip().splitlines() | ||||
|  | ||||
|         if not notification_urls: | ||||
|             logger.debug("Test notification - Trying by group/tag in the edit form if available") | ||||
| @@ -571,12 +578,12 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|  | ||||
|  | ||||
|         if not notification_urls: | ||||
|             return 'No Notification URLs set/found' | ||||
|             return 'Error: No Notification URLs set/found' | ||||
|  | ||||
|         for n_url in notification_urls: | ||||
|             if len(n_url.strip()): | ||||
|                 if not apobj.add(n_url): | ||||
|                     return f'Error - {n_url} is not a valid AppRise URL.' | ||||
|                     return f'Error:  {n_url} is not a valid AppRise URL.' | ||||
|  | ||||
|         try: | ||||
|             # use the same as when it is triggered, but then override it with the form test values | ||||
| @@ -591,15 +598,31 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|  | ||||
|             if 'notification_title' in request.form and request.form['notification_title'].strip(): | ||||
|                 n_object['notification_title'] = request.form.get('notification_title', '').strip() | ||||
|             elif datastore.data['settings']['application'].get('notification_title'): | ||||
|                 n_object['notification_title'] = datastore.data['settings']['application'].get('notification_title') | ||||
|             else: | ||||
|                 n_object['notification_title'] = "Test title" | ||||
|  | ||||
|             if 'notification_body' in request.form and request.form['notification_body'].strip(): | ||||
|                 n_object['notification_body'] = request.form.get('notification_body', '').strip() | ||||
|             elif datastore.data['settings']['application'].get('notification_body'): | ||||
|                 n_object['notification_body'] = datastore.data['settings']['application'].get('notification_body') | ||||
|             else: | ||||
|                 n_object['notification_body'] = "Test body" | ||||
|  | ||||
|             n_object['as_async'] = False | ||||
|             n_object.update(watch.extra_notification_token_values()) | ||||
|             from .notification import process_notification | ||||
|             sent_obj = process_notification(n_object, datastore) | ||||
|  | ||||
|             from . import update_worker | ||||
|             new_worker = update_worker.update_worker(update_q, notification_q, app, datastore) | ||||
|             new_worker.queue_notification_for_watch(notification_q=notification_q, n_object=n_object, watch=watch) | ||||
|         except Exception as e: | ||||
|             return make_response({'error': str(e)}, 400) | ||||
|             e_str = str(e) | ||||
|             # Remove this text which is not important and floods the container | ||||
|             e_str = e_str.replace( | ||||
|                 "DEBUG - <class 'apprise.decorators.base.CustomNotifyPlugin.instantiate_plugin.<locals>.CustomNotifyPluginWrapper'>", | ||||
|                 '') | ||||
|  | ||||
|             return make_response(e_str, 400) | ||||
|  | ||||
|         return 'OK - Sent test notifications' | ||||
|  | ||||
| @@ -709,7 +732,8 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|  | ||||
|         form = form_class(formdata=request.form if request.method == 'POST' else None, | ||||
|                           data=default, | ||||
|                           extra_notification_tokens=default.extra_notification_token_values() | ||||
|                           extra_notification_tokens=default.extra_notification_token_values(), | ||||
|                           default_system_settings=datastore.data['settings'] | ||||
|                           ) | ||||
|  | ||||
|         # For the form widget tag UUID back to "string name" for the field | ||||
| @@ -791,21 +815,47 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|             # Recast it if need be to right data Watch handler | ||||
|             watch_class = get_custom_watch_obj_for_processor(form.data.get('processor')) | ||||
|             datastore.data['watching'][uuid] = watch_class(datastore_path=datastore_o.datastore_path, default=datastore.data['watching'][uuid]) | ||||
|  | ||||
|             flash("Updated watch - unpaused!" if request.args.get('unpause_on_save') else "Updated watch.") | ||||
|  | ||||
|             # Re #286 - We wait for syncing new data to disk in another thread every 60 seconds | ||||
|             # But in the case something is added we should save straight away | ||||
|             datastore.needs_write_urgent = True | ||||
|  | ||||
|             # Queue the watch for immediate recheck, with a higher priority | ||||
|             update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False})) | ||||
|             # Do not queue on edit if its not within the time range | ||||
|  | ||||
|             # @todo maybe it should never queue anyway on edit... | ||||
|             is_in_schedule = True | ||||
|             watch = datastore.data['watching'].get(uuid) | ||||
|  | ||||
|             if watch.get('time_between_check_use_default'): | ||||
|                 time_schedule_limit = datastore.data['settings']['requests'].get('time_schedule_limit', {}) | ||||
|             else: | ||||
|                 time_schedule_limit = watch.get('time_schedule_limit') | ||||
|  | ||||
|             tz_name = time_schedule_limit.get('timezone') | ||||
|             if not tz_name: | ||||
|                 tz_name =  datastore.data['settings']['application'].get('timezone', 'UTC') | ||||
|  | ||||
|             if time_schedule_limit and time_schedule_limit.get('enabled'): | ||||
|                 try: | ||||
|                     is_in_schedule = is_within_schedule(time_schedule_limit=time_schedule_limit, | ||||
|                                                         default_tz=tz_name | ||||
|                                                         ) | ||||
|                 except Exception as e: | ||||
|                     logger.error( | ||||
|                         f"{uuid} - Recheck scheduler, error handling timezone, check skipped - TZ name '{tz_name}' - {str(e)}") | ||||
|                     return False | ||||
|  | ||||
|             ############################# | ||||
|             if not datastore.data['watching'][uuid].get('paused') and is_in_schedule: | ||||
|                 # Queue the watch for immediate recheck, with a higher priority | ||||
|                 update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|  | ||||
|             # Diff page [edit] link should go back to diff page | ||||
|             if request.args.get("next") and request.args.get("next") == 'diff': | ||||
|                 return redirect(url_for('diff_history_page', uuid=uuid)) | ||||
|  | ||||
|             return redirect(url_for('index')) | ||||
|             return redirect(url_for('index', tag=request.args.get("tag",''))) | ||||
|  | ||||
|         else: | ||||
|             if request.method == 'POST' and not form.validate(): | ||||
| @@ -829,15 +879,18 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|             if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver' or watch.get('fetch_backend', '').startswith('extra_browser_'): | ||||
|                 is_html_webdriver = True | ||||
|  | ||||
|             from zoneinfo import available_timezones | ||||
|  | ||||
|             # Only works reliably with Playwright | ||||
|             visualselector_enabled = os.getenv('PLAYWRIGHT_DRIVER_URL', False) and is_html_webdriver | ||||
|             template_args = { | ||||
|                 'available_processors': processors.available_processors(), | ||||
|                 'available_timezones': sorted(available_timezones()), | ||||
|                 'browser_steps_config': browser_step_ui_config, | ||||
|                 'emailprefix': os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False), | ||||
|                 'extra_title': f" - Edit - {watch.label}", | ||||
|                 'extra_processor_config': form.extra_tab_content(), | ||||
|                 'extra_notification_token_placeholder_info': datastore.get_unique_notification_token_placeholders_available(), | ||||
|                 'extra_processor_config': form.extra_tab_content(), | ||||
|                 'extra_title': f" - Edit - {watch.label}", | ||||
|                 'form': form, | ||||
|                 'has_default_notification_urls': True if len(datastore.data['settings']['application']['notification_urls']) else False, | ||||
|                 'has_extra_headers_file': len(datastore.get_all_headers_in_textfile_for_watch(uuid=uuid)) > 0, | ||||
| @@ -846,6 +899,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                 'jq_support': jq_support, | ||||
|                 'playwright_enabled': os.getenv('PLAYWRIGHT_DRIVER_URL', False), | ||||
|                 'settings_application': datastore.data['settings']['application'], | ||||
|                 'timezone_default_config': datastore.data['settings']['application'].get('timezone'), | ||||
|                 'using_global_webdriver_wait': not default['webdriver_delay'], | ||||
|                 'uuid': uuid, | ||||
|                 'visualselector_enabled': visualselector_enabled, | ||||
| @@ -875,6 +929,8 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|     @login_optionally_required | ||||
|     def settings_page(): | ||||
|         from changedetectionio import forms | ||||
|         from datetime import datetime | ||||
|         from zoneinfo import available_timezones | ||||
|  | ||||
|         default = deepcopy(datastore.data['settings']) | ||||
|         if datastore.proxy_list is not None: | ||||
| @@ -942,14 +998,20 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|             else: | ||||
|                 flash("An error occurred, please see below.", "error") | ||||
|  | ||||
|         # Convert to ISO 8601 format, all date/time relative events stored as UTC time | ||||
|         utc_time = datetime.now(ZoneInfo("UTC")).isoformat() | ||||
|  | ||||
|         output = render_template("settings.html", | ||||
|                                  api_key=datastore.data['settings']['application'].get('api_access_token'), | ||||
|                                  available_timezones=sorted(available_timezones()), | ||||
|                                  emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False), | ||||
|                                  extra_notification_token_placeholder_info=datastore.get_unique_notification_token_placeholders_available(), | ||||
|                                  form=form, | ||||
|                                  hide_remove_pass=os.getenv("SALTED_PASS", False), | ||||
|                                  min_system_recheck_seconds=int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 3)), | ||||
|                                  settings_application=datastore.data['settings']['application'] | ||||
|                                  settings_application=datastore.data['settings']['application'], | ||||
|                                  timezone_default_config=datastore.data['settings']['application'].get('timezone'), | ||||
|                                  utc_time=utc_time, | ||||
|                                  ) | ||||
|  | ||||
|         return output | ||||
| @@ -980,7 +1042,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                 importer = import_url_list() | ||||
|                 importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore, processor=request.values.get('processor', 'text_json_diff')) | ||||
|                 for uuid in importer.new_uuids: | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True})) | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|  | ||||
|                 if len(importer.remaining_data) == 0: | ||||
|                     return redirect(url_for('index')) | ||||
| @@ -993,7 +1055,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                 d_importer = import_distill_io_json() | ||||
|                 d_importer.run(data=request.values.get('distill-io'), flash=flash, datastore=datastore) | ||||
|                 for uuid in d_importer.new_uuids: | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True})) | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|  | ||||
|             # XLSX importer | ||||
|             if request.files and request.files.get('xlsx_file'): | ||||
| @@ -1017,7 +1079,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                     w_importer.run(data=file, flash=flash, datastore=datastore) | ||||
|  | ||||
|                 for uuid in w_importer.new_uuids: | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True})) | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|  | ||||
|         # Could be some remaining, or we could be on GET | ||||
|         form = forms.importForm(formdata=request.form if request.method == 'POST' else None) | ||||
| @@ -1230,78 +1292,6 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|  | ||||
|         return output | ||||
|  | ||||
|     # We're good but backups are even better! | ||||
|     @app.route("/backup", methods=['GET']) | ||||
|     @login_optionally_required | ||||
|     def get_backup(): | ||||
|  | ||||
|         import zipfile | ||||
|         from pathlib import Path | ||||
|  | ||||
|         # Remove any existing backup file, for now we just keep one file | ||||
|  | ||||
|         for previous_backup_filename in Path(datastore_o.datastore_path).rglob('changedetection-backup-*.zip'): | ||||
|             os.unlink(previous_backup_filename) | ||||
|  | ||||
|         # create a ZipFile object | ||||
|         timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S") | ||||
|         backupname = "changedetection-backup-{}.zip".format(timestamp) | ||||
|         backup_filepath = os.path.join(datastore_o.datastore_path, backupname) | ||||
|  | ||||
|         with zipfile.ZipFile(backup_filepath, "w", | ||||
|                              compression=zipfile.ZIP_DEFLATED, | ||||
|                              compresslevel=8) as zipObj: | ||||
|  | ||||
|             # Be sure we're written fresh | ||||
|             datastore.sync_to_json() | ||||
|  | ||||
|             # Add the index | ||||
|             zipObj.write(os.path.join(datastore_o.datastore_path, "url-watches.json"), arcname="url-watches.json") | ||||
|  | ||||
|             # Add the flask app secret | ||||
|             zipObj.write(os.path.join(datastore_o.datastore_path, "secret.txt"), arcname="secret.txt") | ||||
|  | ||||
|             # Add any data in the watch data directory. | ||||
|             for uuid, w in datastore.data['watching'].items(): | ||||
|                 for f in Path(w.watch_data_dir).glob('*'): | ||||
|                     zipObj.write(f, | ||||
|                                  # Use the full path to access the file, but make the file 'relative' in the Zip. | ||||
|                                  arcname=os.path.join(f.parts[-2], f.parts[-1]), | ||||
|                                  compress_type=zipfile.ZIP_DEFLATED, | ||||
|                                  compresslevel=8) | ||||
|  | ||||
|             # Create a list file with just the URLs, so it's easier to port somewhere else in the future | ||||
|             list_file = "url-list.txt" | ||||
|             with open(os.path.join(datastore_o.datastore_path, list_file), "w") as f: | ||||
|                 for uuid in datastore.data["watching"]: | ||||
|                     url = datastore.data["watching"][uuid]["url"] | ||||
|                     f.write("{}\r\n".format(url)) | ||||
|             list_with_tags_file = "url-list-with-tags.txt" | ||||
|             with open( | ||||
|                 os.path.join(datastore_o.datastore_path, list_with_tags_file), "w" | ||||
|             ) as f: | ||||
|                 for uuid in datastore.data["watching"]: | ||||
|                     url = datastore.data["watching"][uuid].get('url') | ||||
|                     tag = datastore.data["watching"][uuid].get('tags', {}) | ||||
|                     f.write("{} {}\r\n".format(url, tag)) | ||||
|  | ||||
|             # Add it to the Zip | ||||
|             zipObj.write( | ||||
|                 os.path.join(datastore_o.datastore_path, list_file), | ||||
|                 arcname=list_file, | ||||
|                 compress_type=zipfile.ZIP_DEFLATED, | ||||
|                 compresslevel=8, | ||||
|             ) | ||||
|             zipObj.write( | ||||
|                 os.path.join(datastore_o.datastore_path, list_with_tags_file), | ||||
|                 arcname=list_with_tags_file, | ||||
|                 compress_type=zipfile.ZIP_DEFLATED, | ||||
|                 compresslevel=8, | ||||
|             ) | ||||
|  | ||||
|         # Send_from_directory needs to be the full absolute path | ||||
|         return send_from_directory(os.path.abspath(datastore_o.datastore_path), backupname, as_attachment=True) | ||||
|  | ||||
|     @app.route("/static/<string:group>/<string:filename>", methods=['GET']) | ||||
|     def static_content(group, filename): | ||||
|         from flask import make_response | ||||
| @@ -1334,12 +1324,23 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|  | ||||
|             # These files should be in our subdirectory | ||||
|             try: | ||||
|                 # set nocache, set content-type | ||||
|                 response = make_response(send_from_directory(os.path.join(datastore_o.datastore_path, filename), "elements.json")) | ||||
|                 response.headers['Content-type'] = 'application/json' | ||||
|                 response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate' | ||||
|                 response.headers['Pragma'] = 'no-cache' | ||||
|                 response.headers['Expires'] = 0 | ||||
|                 # set nocache, set content-type, | ||||
|                 # `filename` is actually directory UUID of the watch | ||||
|                 watch_directory = str(os.path.join(datastore_o.datastore_path, filename)) | ||||
|                 response = None | ||||
|                 if os.path.isfile(os.path.join(watch_directory, "elements.deflate")): | ||||
|                     response = make_response(send_from_directory(watch_directory, "elements.deflate")) | ||||
|                     response.headers['Content-Type'] = 'application/json' | ||||
|                     response.headers['Content-Encoding'] = 'deflate' | ||||
|                 else: | ||||
|                     logger.error(f'Request elements.deflate at "{watch_directory}" but was notfound.') | ||||
|                     abort(404) | ||||
|  | ||||
|                 if response: | ||||
|                     response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate' | ||||
|                     response.headers['Pragma'] = 'no-cache' | ||||
|                     response.headers['Expires'] = "0" | ||||
|  | ||||
|                 return response | ||||
|  | ||||
|             except FileNotFoundError: | ||||
| @@ -1381,78 +1382,9 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|     @app.route("/edit/<string:uuid>/preview-rendered", methods=['POST']) | ||||
|     @login_optionally_required | ||||
|     def watch_get_preview_rendered(uuid): | ||||
|         from flask import jsonify | ||||
|         '''For when viewing the "preview" of the rendered text from inside of Edit''' | ||||
|         now = time.time() | ||||
|         import brotli | ||||
|         from . import forms | ||||
|  | ||||
|         text_after_filter = '' | ||||
|         tmp_watch = deepcopy(datastore.data['watching'].get(uuid)) | ||||
|  | ||||
|         if tmp_watch and tmp_watch.history and os.path.isdir(tmp_watch.watch_data_dir): | ||||
|             # Splice in the temporary stuff from the form | ||||
|             form = forms.processor_text_json_diff_form(formdata=request.form if request.method == 'POST' else None, | ||||
|                                                        data=request.form | ||||
|                                                        ) | ||||
|             # Only update vars that came in via the AJAX post | ||||
|             p = {k: v for k, v in form.data.items() if k in request.form.keys()} | ||||
|             tmp_watch.update(p) | ||||
|  | ||||
|             latest_filename = next(reversed(tmp_watch.history)) | ||||
|             html_fname = os.path.join(tmp_watch.watch_data_dir, f"{latest_filename}.html.br") | ||||
|             with open(html_fname, 'rb') as f: | ||||
|                 decompressed_data = brotli.decompress(f.read()).decode('utf-8') if html_fname.endswith('.br') else f.read().decode('utf-8') | ||||
|  | ||||
|                 # Just like a normal change detection except provide a fake "watch" object and dont call .call_browser() | ||||
|                 processor_module = importlib.import_module("changedetectionio.processors.text_json_diff.processor") | ||||
|                 update_handler = processor_module.perform_site_check(datastore=datastore, | ||||
|                                                                      watch_uuid=uuid # probably not needed anymore anyway? | ||||
|                                                                      ) | ||||
|                 # Use the last loaded HTML as the input | ||||
|                 update_handler.fetcher.content = decompressed_data | ||||
|                 update_handler.fetcher.headers['content-type'] = tmp_watch.get('content-type') | ||||
|                 try: | ||||
|                     changed_detected, update_obj, text_after_filter = update_handler.run_changedetection( | ||||
|                         watch=tmp_watch, | ||||
|                         skip_when_checksum_same=False, | ||||
|                     ) | ||||
|                 except FilterNotFoundInResponse as e: | ||||
|                     text_after_filter = f"Filter not found in HTML: {str(e)}" | ||||
|                 except ReplyWithContentButNoText as e: | ||||
|                     text_after_filter = f"Filter found but no text (empty result)" | ||||
|                 except Exception as e: | ||||
|                     text_after_filter = f"Error: {str(e)}" | ||||
|  | ||||
|             if not text_after_filter.strip(): | ||||
|                 text_after_filter = 'Empty content' | ||||
|  | ||||
|         # because run_changedetection always returns bytes due to saving the snapshots etc | ||||
|         text_after_filter = text_after_filter.decode('utf-8') if isinstance(text_after_filter, bytes) else text_after_filter | ||||
|  | ||||
|         do_anchor = datastore.data["settings"]["application"].get("render_anchor_tag_content", False) | ||||
|  | ||||
|         trigger_line_numbers = [] | ||||
|         try: | ||||
|             text_before_filter = html_tools.html_to_text(html_content=decompressed_data, | ||||
|                                                          render_anchor_tag_content=do_anchor) | ||||
|  | ||||
|             trigger_line_numbers = html_tools.strip_ignore_text(content=text_after_filter, | ||||
|                                                                 wordlist=tmp_watch['trigger_text'], | ||||
|                                                                 mode='line numbers' | ||||
|                                                                 ) | ||||
|         except Exception as e: | ||||
|             text_before_filter = f"Error: {str(e)}" | ||||
|  | ||||
|         logger.trace(f"Parsed in {time.time() - now:.3f}s") | ||||
|  | ||||
|         return jsonify( | ||||
|             { | ||||
|                 'after_filter': text_after_filter, | ||||
|                 'before_filter': text_before_filter.decode('utf-8') if isinstance(text_before_filter, bytes) else text_before_filter, | ||||
|                 'trigger_line_numbers': trigger_line_numbers | ||||
|             } | ||||
|         ) | ||||
|         from .processors.text_json_diff import prepare_filter_prevew | ||||
|         return prepare_filter_prevew(watch_uuid=uuid, datastore=datastore) | ||||
|  | ||||
|  | ||||
|     @app.route("/form/add/quickwatch", methods=['POST']) | ||||
| @@ -1469,7 +1401,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|         url = request.form.get('url').strip() | ||||
|         if datastore.url_exists(url): | ||||
|             flash(f'Warning, URL {url} already exists', "notice") | ||||
|              | ||||
|  | ||||
|         add_paused = request.form.get('edit_and_watch_submit_button') != None | ||||
|         processor = request.form.get('processor', 'text_json_diff') | ||||
|         new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras={'paused': add_paused, 'processor': processor}) | ||||
| @@ -1477,13 +1409,13 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|         if new_uuid: | ||||
|             if add_paused: | ||||
|                 flash('Watch added in Paused state, saving will unpause.') | ||||
|                 return redirect(url_for('edit_page', uuid=new_uuid, unpause_on_save=1)) | ||||
|                 return redirect(url_for('edit_page', uuid=new_uuid, unpause_on_save=1, tag=request.args.get('tag'))) | ||||
|             else: | ||||
|                 # Straight into the queue. | ||||
|                 update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid})) | ||||
|                 flash("Watch added.") | ||||
|  | ||||
|         return redirect(url_for('index')) | ||||
|         return redirect(url_for('index', tag=request.args.get('tag',''))) | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -1515,7 +1447,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|         new_uuid = datastore.clone(uuid) | ||||
|         if new_uuid: | ||||
|             if not datastore.data['watching'].get(uuid).get('paused'): | ||||
|                 update_q.put(queuedWatchMetaData.PrioritizedItem(priority=5, item={'uuid': new_uuid, 'skip_when_checksum_same': True})) | ||||
|                 update_q.put(queuedWatchMetaData.PrioritizedItem(priority=5, item={'uuid': new_uuid})) | ||||
|             flash('Cloned.') | ||||
|  | ||||
|         return redirect(url_for('index')) | ||||
| @@ -1536,7 +1468,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|  | ||||
|         if uuid: | ||||
|             if uuid not in running_uuids: | ||||
|                 update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False})) | ||||
|                 update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|             i = 1 | ||||
|  | ||||
|         elif tag: | ||||
| @@ -1547,7 +1479,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                         continue | ||||
|                     if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']: | ||||
|                         update_q.put( | ||||
|                             queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False}) | ||||
|                             queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid}) | ||||
|                         ) | ||||
|                         i += 1 | ||||
|  | ||||
| @@ -1557,9 +1489,8 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                 if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']: | ||||
|                     if with_errors and not watch.get('last_error'): | ||||
|                         continue | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False})) | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid})) | ||||
|                     i += 1 | ||||
|  | ||||
|         flash(f"{i} watches queued for rechecking.") | ||||
|         return redirect(url_for('index', tag=tag)) | ||||
|  | ||||
| @@ -1616,7 +1547,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                 uuid = uuid.strip() | ||||
|                 if datastore.data['watching'].get(uuid): | ||||
|                     # Recheck and require a full reprocessing | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False})) | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid})) | ||||
|             flash("{} watches queued for rechecking".format(len(uuids))) | ||||
|  | ||||
|         elif (op == 'clear-errors'): | ||||
| @@ -1751,13 +1682,15 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|     import changedetectionio.blueprint.check_proxies as check_proxies | ||||
|     app.register_blueprint(check_proxies.construct_blueprint(datastore=datastore), url_prefix='/check_proxy') | ||||
|  | ||||
|     import changedetectionio.blueprint.backups as backups | ||||
|     app.register_blueprint(backups.construct_blueprint(datastore), url_prefix='/backups') | ||||
|  | ||||
|     # @todo handle ctrl break | ||||
|     ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start() | ||||
|     threading.Thread(target=notification_runner).start() | ||||
|  | ||||
|     # Check for new release version, but not when running in test/build or pytest | ||||
|     if not os.getenv("GITHUB_REF", False) and not config.get('disable_checkver') == True: | ||||
|     if not os.getenv("GITHUB_REF", False) and not strtobool(os.getenv('DISABLE_VERSION_CHECK', 'no')): | ||||
|         threading.Thread(target=check_for_new_version).start() | ||||
|  | ||||
|     return app | ||||
| @@ -1841,7 +1774,6 @@ def notification_runner(): | ||||
| def ticker_thread_check_time_launch_checks(): | ||||
|     import random | ||||
|     from changedetectionio import update_worker | ||||
|  | ||||
|     proxy_last_called_time = {} | ||||
|  | ||||
|     recheck_time_minimum_seconds = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 3)) | ||||
| @@ -1875,12 +1807,14 @@ def ticker_thread_check_time_launch_checks(): | ||||
|             except RuntimeError as e: | ||||
|                 # RuntimeError: dictionary changed size during iteration | ||||
|                 time.sleep(0.1) | ||||
|                 watch_uuid_list = [] | ||||
|             else: | ||||
|                 break | ||||
|  | ||||
|         # Re #438 - Don't place more watches in the queue to be checked if the queue is already large | ||||
|         while update_q.qsize() >= 2000: | ||||
|             time.sleep(1) | ||||
|             logger.warning(f"Recheck watches queue size limit reached ({MAX_QUEUE_SIZE}), skipping adding more items") | ||||
|             time.sleep(3) | ||||
|  | ||||
|  | ||||
|         recheck_time_system_seconds = int(datastore.threshold_seconds) | ||||
| @@ -1897,6 +1831,28 @@ def ticker_thread_check_time_launch_checks(): | ||||
|             if watch['paused']: | ||||
|                 continue | ||||
|  | ||||
|             # @todo - Maybe make this a hook? | ||||
|             # Time schedule limit - Decide between watch or global settings | ||||
|             if watch.get('time_between_check_use_default'): | ||||
|                 time_schedule_limit = datastore.data['settings']['requests'].get('time_schedule_limit', {}) | ||||
|                 logger.trace(f"{uuid} Time scheduler - Using system/global settings") | ||||
|             else: | ||||
|                 time_schedule_limit = watch.get('time_schedule_limit') | ||||
|                 logger.trace(f"{uuid} Time scheduler - Using watch settings (not global settings)") | ||||
|             tz_name = datastore.data['settings']['application'].get('timezone', 'UTC') | ||||
|  | ||||
|             if time_schedule_limit and time_schedule_limit.get('enabled'): | ||||
|                 try: | ||||
|                     result = is_within_schedule(time_schedule_limit=time_schedule_limit, | ||||
|                                                 default_tz=tz_name | ||||
|                                                 ) | ||||
|                     if not result: | ||||
|                         logger.trace(f"{uuid} Time scheduler - not within schedule skipping.") | ||||
|                         continue | ||||
|                 except Exception as e: | ||||
|                     logger.error( | ||||
|                         f"{uuid} - Recheck scheduler, error handling timezone, check skipped - TZ name '{tz_name}' - {str(e)}") | ||||
|                     return False | ||||
|             # If they supplied an individual entry minutes to threshold. | ||||
|             threshold = recheck_time_system_seconds if watch.get('time_between_check_use_default') else watch.threshold_seconds() | ||||
|  | ||||
| @@ -1940,7 +1896,7 @@ def ticker_thread_check_time_launch_checks(): | ||||
|                         f"{now - watch['last_checked']:0.2f}s since last checked") | ||||
|  | ||||
|                     # Into the queue with you | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=priority, item={'uuid': uuid, 'skip_when_checksum_same': True})) | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=priority, item={'uuid': uuid})) | ||||
|  | ||||
|                     # Reset for next time | ||||
|                     watch.jitter_seconds = 0 | ||||
|   | ||||
| @@ -1,12 +1,14 @@ | ||||
| import os | ||||
| import re | ||||
| from loguru import logger | ||||
| from wtforms.widgets.core import TimeInput | ||||
|  | ||||
| from changedetectionio.strtobool import strtobool | ||||
|  | ||||
| from wtforms import ( | ||||
|     BooleanField, | ||||
|     Form, | ||||
|     Field, | ||||
|     IntegerField, | ||||
|     RadioField, | ||||
|     SelectField, | ||||
| @@ -125,6 +127,87 @@ class StringTagUUID(StringField): | ||||
|  | ||||
|         return 'error' | ||||
|  | ||||
| class TimeDurationForm(Form): | ||||
|     hours = SelectField(choices=[(f"{i}", f"{i}") for i in range(0, 25)], default="24",  validators=[validators.Optional()]) | ||||
|     minutes = SelectField(choices=[(f"{i}", f"{i}") for i in range(0, 60)], default="00", validators=[validators.Optional()]) | ||||
|  | ||||
| class TimeStringField(Field): | ||||
|     """ | ||||
|     A WTForms field for time inputs (HH:MM) that stores the value as a string. | ||||
|     """ | ||||
|     widget = TimeInput()  # Use the built-in time input widget | ||||
|  | ||||
|     def _value(self): | ||||
|         """ | ||||
|         Returns the value for rendering in the form. | ||||
|         """ | ||||
|         return self.data if self.data is not None else "" | ||||
|  | ||||
|     def process_formdata(self, valuelist): | ||||
|         """ | ||||
|         Processes the raw input from the form and stores it as a string. | ||||
|         """ | ||||
|         if valuelist: | ||||
|             time_str = valuelist[0] | ||||
|             # Simple validation for HH:MM format | ||||
|             if not time_str or len(time_str.split(":")) != 2: | ||||
|                 raise ValidationError("Invalid time format. Use HH:MM.") | ||||
|             self.data = time_str | ||||
|  | ||||
|  | ||||
| class validateTimeZoneName(object): | ||||
|     """ | ||||
|        Flask wtform validators wont work with basic auth | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, message=None): | ||||
|         self.message = message | ||||
|  | ||||
|     def __call__(self, form, field): | ||||
|         from zoneinfo import available_timezones | ||||
|         python_timezones = available_timezones() | ||||
|         if field.data and field.data not in python_timezones: | ||||
|             raise ValidationError("Not a valid timezone name") | ||||
|  | ||||
| class ScheduleLimitDaySubForm(Form): | ||||
|     enabled = BooleanField("not set", default=True) | ||||
|     start_time = TimeStringField("Start At", default="00:00", render_kw={"placeholder": "HH:MM"}, validators=[validators.Optional()]) | ||||
|     duration = FormField(TimeDurationForm, label="Run duration") | ||||
|  | ||||
| class ScheduleLimitForm(Form): | ||||
|     enabled = BooleanField("Use time scheduler", default=False) | ||||
|     # Because the label for=""" doesnt line up/work with the actual checkbox | ||||
|     monday = FormField(ScheduleLimitDaySubForm, label="") | ||||
|     tuesday = FormField(ScheduleLimitDaySubForm, label="") | ||||
|     wednesday = FormField(ScheduleLimitDaySubForm, label="") | ||||
|     thursday = FormField(ScheduleLimitDaySubForm, label="") | ||||
|     friday = FormField(ScheduleLimitDaySubForm, label="") | ||||
|     saturday = FormField(ScheduleLimitDaySubForm, label="") | ||||
|     sunday = FormField(ScheduleLimitDaySubForm, label="") | ||||
|  | ||||
|     timezone = StringField("Optional timezone to run in", | ||||
|                                   render_kw={"list": "timezones"}, | ||||
|                                   validators=[validateTimeZoneName()] | ||||
|                                   ) | ||||
|     def __init__( | ||||
|         self, | ||||
|         formdata=None, | ||||
|         obj=None, | ||||
|         prefix="", | ||||
|         data=None, | ||||
|         meta=None, | ||||
|         **kwargs, | ||||
|     ): | ||||
|         super().__init__(formdata, obj, prefix, data, meta, **kwargs) | ||||
|         self.monday.form.enabled.label.text="Monday" | ||||
|         self.tuesday.form.enabled.label.text = "Tuesday" | ||||
|         self.wednesday.form.enabled.label.text = "Wednesday" | ||||
|         self.thursday.form.enabled.label.text = "Thursday" | ||||
|         self.friday.form.enabled.label.text = "Friday" | ||||
|         self.saturday.form.enabled.label.text = "Saturday" | ||||
|         self.sunday.form.enabled.label.text = "Sunday" | ||||
|  | ||||
|  | ||||
| class TimeBetweenCheckForm(Form): | ||||
|     weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")]) | ||||
|     days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")]) | ||||
| @@ -225,8 +308,12 @@ class ValidateAppRiseServers(object): | ||||
|         # so that the custom endpoints are registered | ||||
|         from changedetectionio.apprise_plugin import apprise_custom_api_call_wrapper | ||||
|         for server_url in field.data: | ||||
|             if not apobj.add(server_url): | ||||
|                 message = field.gettext('\'%s\' is not a valid AppRise URL.' % (server_url)) | ||||
|             url = server_url.strip() | ||||
|             if url.startswith("#"): | ||||
|                 continue | ||||
|  | ||||
|             if not apobj.add(url): | ||||
|                 message = field.gettext('\'%s\' is not a valid AppRise URL.' % (url)) | ||||
|                 raise ValidationError(message) | ||||
|  | ||||
| class ValidateJinja2Template(object): | ||||
| @@ -279,6 +366,7 @@ class validateURL(object): | ||||
|         # This should raise a ValidationError() or not | ||||
|         validate_url(field.data) | ||||
|  | ||||
|  | ||||
| def validate_url(test_url): | ||||
|     # If hosts that only contain alphanumerics are allowed ("localhost" for example) | ||||
|     try: | ||||
| @@ -438,6 +526,7 @@ class commonSettingsForm(Form): | ||||
|     notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()]) | ||||
|     notification_urls = StringListField('Notification URL List', validators=[validators.Optional(), ValidateAppRiseServers(), ValidateJinja2Template()]) | ||||
|     processor = RadioField( label=u"Processor - What do you want to achieve?", choices=processors.available_processors(), default="text_json_diff") | ||||
|     timezone = StringField("Timezone for watch schedule", render_kw={"list": "timezones"}, validators=[validateTimeZoneName()]) | ||||
|     webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, message="Should contain one or more seconds")]) | ||||
|  | ||||
|  | ||||
| @@ -448,7 +537,6 @@ class importForm(Form): | ||||
|     xlsx_file = FileField('Upload .xlsx file', validators=[FileAllowed(['xlsx'], 'Must be .xlsx file!')]) | ||||
|     file_mapping = SelectField('File mapping', [validators.DataRequired()], choices={('wachete', 'Wachete mapping'), ('custom','Custom mapping')}) | ||||
|  | ||||
|  | ||||
| class SingleBrowserStep(Form): | ||||
|  | ||||
|     operation = SelectField('Operation', [validators.Optional()], choices=browser_step_ui_config.keys()) | ||||
| @@ -466,6 +554,9 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|     tags = StringTagUUID('Group tag', [validators.Optional()], default='') | ||||
|  | ||||
|     time_between_check = FormField(TimeBetweenCheckForm) | ||||
|  | ||||
|     time_schedule_limit = FormField(ScheduleLimitForm) | ||||
|  | ||||
|     time_between_check_use_default = BooleanField('Use global settings for time between check', default=False) | ||||
|  | ||||
|     include_filters = StringListField('CSS/JSONPath/JQ/XPath Filters', [ValidateCSSJSONXPATHInput()], default='') | ||||
| @@ -476,7 +567,7 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|  | ||||
|     title = StringField('Title', default='') | ||||
|  | ||||
|     ignore_text = StringListField('Remove lines containing', [ValidateListRegex()]) | ||||
|     ignore_text = StringListField('Ignore lines containing', [ValidateListRegex()]) | ||||
|     headers = StringDictKeyValue('Request headers') | ||||
|     body = TextAreaField('Request body', [validators.Optional()]) | ||||
|     method = SelectField('Request method', choices=valid_method, default=default_method) | ||||
| @@ -496,7 +587,7 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|     text_should_not_be_present = StringListField('Block change-detection while text matches', [validators.Optional(), ValidateListRegex()]) | ||||
|     webdriver_js_execute_code = TextAreaField('Execute JavaScript before change detection', render_kw={"rows": "5"}, validators=[validators.Optional()]) | ||||
|  | ||||
|     save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|     save_button = SubmitField('Save', render_kw={"class": "pure-button button-small pure-button-primary"}) | ||||
|  | ||||
|     proxy = RadioField('Proxy') | ||||
|     filter_failure_notification_send = BooleanField( | ||||
| @@ -515,6 +606,7 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|         if not super().validate(): | ||||
|             return False | ||||
|  | ||||
|         from changedetectionio.safe_jinja import render as jinja_render | ||||
|         result = True | ||||
|  | ||||
|         # Fail form validation when a body is set for a GET | ||||
| @@ -524,20 +616,65 @@ class processor_text_json_diff_form(commonSettingsForm): | ||||
|  | ||||
|         # Attempt to validate jinja2 templates in the URL | ||||
|         try: | ||||
|             from changedetectionio.safe_jinja import render as jinja_render | ||||
|             jinja_render(template_str=self.url.data) | ||||
|         except ModuleNotFoundError as e: | ||||
|             # incase jinja2_time or others is missing | ||||
|             logger.error(e) | ||||
|             self.url.errors.append(e) | ||||
|             self.url.errors.append(f'Invalid template syntax configuration: {e}') | ||||
|             result = False | ||||
|         except Exception as e: | ||||
|             logger.error(e) | ||||
|             self.url.errors.append('Invalid template syntax') | ||||
|             self.url.errors.append(f'Invalid template syntax: {e}') | ||||
|             result = False | ||||
|  | ||||
|         # Attempt to validate jinja2 templates in the body | ||||
|         if self.body.data and self.body.data.strip(): | ||||
|             try: | ||||
|                 jinja_render(template_str=self.body.data) | ||||
|             except ModuleNotFoundError as e: | ||||
|                 # incase jinja2_time or others is missing | ||||
|                 logger.error(e) | ||||
|                 self.body.errors.append(f'Invalid template syntax configuration: {e}') | ||||
|                 result = False | ||||
|             except Exception as e: | ||||
|                 logger.error(e) | ||||
|                 self.body.errors.append(f'Invalid template syntax: {e}') | ||||
|                 result = False | ||||
|  | ||||
|         # Attempt to validate jinja2 templates in the headers | ||||
|         if len(self.headers.data) > 0: | ||||
|             try: | ||||
|                 for header, value in self.headers.data.items(): | ||||
|                     jinja_render(template_str=value) | ||||
|             except ModuleNotFoundError as e: | ||||
|                 # incase jinja2_time or others is missing | ||||
|                 logger.error(e) | ||||
|                 self.headers.errors.append(f'Invalid template syntax configuration: {e}') | ||||
|                 result = False | ||||
|             except Exception as e: | ||||
|                 logger.error(e) | ||||
|                 self.headers.errors.append(f'Invalid template syntax in "{header}" header: {e}') | ||||
|                 result = False | ||||
|  | ||||
|         return result | ||||
|  | ||||
|     def __init__( | ||||
|             self, | ||||
|             formdata=None, | ||||
|             obj=None, | ||||
|             prefix="", | ||||
|             data=None, | ||||
|             meta=None, | ||||
|             **kwargs, | ||||
|     ): | ||||
|         super().__init__(formdata, obj, prefix, data, meta, **kwargs) | ||||
|         if kwargs and kwargs.get('default_system_settings'): | ||||
|             default_tz = kwargs.get('default_system_settings').get('application', {}).get('timezone') | ||||
|             if default_tz: | ||||
|                 self.time_schedule_limit.form.timezone.render_kw['placeholder'] = default_tz | ||||
|  | ||||
|  | ||||
|  | ||||
| class SingleExtraProxy(Form): | ||||
|  | ||||
|     # maybe better to set some <script>var.. | ||||
| @@ -558,6 +695,7 @@ class DefaultUAInputForm(Form): | ||||
| # datastore.data['settings']['requests'].. | ||||
| class globalSettingsRequestForm(Form): | ||||
|     time_between_check = FormField(TimeBetweenCheckForm) | ||||
|     time_schedule_limit = FormField(ScheduleLimitForm) | ||||
|     proxy = RadioField('Proxy') | ||||
|     jitter_seconds = IntegerField('Random jitter seconds ± check', | ||||
|                                   render_kw={"style": "width: 5em;"}, | ||||
| @@ -616,7 +754,7 @@ class globalSettingsForm(Form): | ||||
|  | ||||
|     requests = FormField(globalSettingsRequestForm) | ||||
|     application = FormField(globalSettingsApplicationForm) | ||||
|     save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|     save_button = SubmitField('Save', render_kw={"class": "pure-button button-small pure-button-primary"}) | ||||
|  | ||||
|  | ||||
| class extractDataForm(Form): | ||||
|   | ||||
| @@ -1,13 +1,14 @@ | ||||
| from typing import List | ||||
| from loguru import logger | ||||
| from lxml import etree | ||||
| from typing import List | ||||
| import json | ||||
| import re | ||||
|  | ||||
|  | ||||
| # HTML added to be sure each result matching a filter (.example) gets converted to a new line by Inscriptis | ||||
| TEXT_FILTER_LIST_LINE_SUFFIX = "<br>" | ||||
|  | ||||
| TRANSLATE_WHITESPACE_TABLE = str.maketrans('', '', '\r\n\t ') | ||||
| PERL_STYLE_REGEX = r'^/(.*?)/([a-z]*)?$' | ||||
|  | ||||
| # 'price' , 'lowPrice', 'highPrice' are usually under here | ||||
| # All of those may or may not appear on different websites - I didnt find a way todo case-insensitive searching here | ||||
| LD_JSON_PRODUCT_OFFER_SELECTORS = ["json:$..offers", "json:$..Offers"] | ||||
| @@ -54,29 +55,64 @@ def include_filters(include_filters, html_content, append_pretty_line_formatting | ||||
| def subtractive_css_selector(css_selector, html_content): | ||||
|     from bs4 import BeautifulSoup | ||||
|     soup = BeautifulSoup(html_content, "html.parser") | ||||
|     for item in soup.select(css_selector): | ||||
|  | ||||
|     # So that the elements dont shift their index, build a list of elements here which will be pointers to their place in the DOM | ||||
|     elements_to_remove = soup.select(css_selector) | ||||
|  | ||||
|     # Then, remove them in a separate loop | ||||
|     for item in elements_to_remove: | ||||
|         item.decompose() | ||||
|  | ||||
|     return str(soup) | ||||
|  | ||||
| def subtractive_xpath_selector(xpath_selector, html_content):  | ||||
| def subtractive_xpath_selector(selectors: List[str], html_content: str) -> str: | ||||
|     # Parse the HTML content using lxml | ||||
|     html_tree = etree.HTML(html_content) | ||||
|     elements_to_remove = html_tree.xpath(xpath_selector) | ||||
|  | ||||
|     # First, collect all elements to remove | ||||
|     elements_to_remove = [] | ||||
|  | ||||
|     # Iterate over the list of XPath selectors | ||||
|     for selector in selectors: | ||||
|         # Collect elements for each selector | ||||
|         elements_to_remove.extend(html_tree.xpath(selector)) | ||||
|  | ||||
|     # Then, remove them in a separate loop | ||||
|     for element in elements_to_remove: | ||||
|         element.getparent().remove(element) | ||||
|         if element.getparent() is not None:  # Ensure the element has a parent before removing | ||||
|             element.getparent().remove(element) | ||||
|  | ||||
|     # Convert the modified HTML tree back to a string | ||||
|     modified_html = etree.tostring(html_tree, method="html").decode("utf-8") | ||||
|     return modified_html | ||||
|  | ||||
|  | ||||
| def element_removal(selectors: List[str], html_content): | ||||
|     """Removes elements that match a list of CSS or xPath selectors.""" | ||||
|     """Removes elements that match a list of CSS or XPath selectors.""" | ||||
|     modified_html = html_content | ||||
|     css_selectors = [] | ||||
|     xpath_selectors = [] | ||||
|  | ||||
|     for selector in selectors: | ||||
|         if selector.startswith(('xpath:', 'xpath1:', '//')): | ||||
|             # Handle XPath selectors separately | ||||
|             xpath_selector = selector.removeprefix('xpath:').removeprefix('xpath1:') | ||||
|             modified_html = subtractive_xpath_selector(xpath_selector, modified_html) | ||||
|             xpath_selectors.append(xpath_selector) | ||||
|         else: | ||||
|             modified_html = subtractive_css_selector(selector, modified_html) | ||||
|             # Collect CSS selectors as one "hit", see comment in subtractive_css_selector | ||||
|             css_selectors.append(selector.strip().strip(",")) | ||||
|  | ||||
|     if xpath_selectors: | ||||
|         modified_html = subtractive_xpath_selector(xpath_selectors, modified_html) | ||||
|  | ||||
|     if css_selectors: | ||||
|         # Remove duplicates, then combine all CSS selectors into one string, separated by commas | ||||
|         # This stops the elements index shifting | ||||
|         unique_selectors = list(set(css_selectors))  # Ensure uniqueness | ||||
|         combined_css_selector = " , ".join(unique_selectors) | ||||
|         modified_html = subtractive_css_selector(combined_css_selector, modified_html) | ||||
|  | ||||
|  | ||||
|     return modified_html | ||||
|  | ||||
| def elementpath_tostring(obj): | ||||
| @@ -263,8 +299,10 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None | ||||
| # https://github.com/dgtlmoon/changedetection.io/pull/2041#issuecomment-1848397161w | ||||
|     # Try to parse/filter out the JSON, if we get some parser error, then maybe it's embedded within HTML tags | ||||
|     try: | ||||
|         stripped_text_from_html = _parse_json(json.loads(content), json_filter) | ||||
|     except json.JSONDecodeError: | ||||
|         # .lstrip("\ufeff") strings ByteOrderMark from UTF8 and still lets the UTF work | ||||
|         stripped_text_from_html = _parse_json(json.loads(content.lstrip("\ufeff") ), json_filter) | ||||
|     except json.JSONDecodeError as e: | ||||
|         logger.warning(str(e)) | ||||
|  | ||||
|         # Foreach <script json></script> blob.. just return the first that matches json_filter | ||||
|         # As a last resort, try to parse the whole <body> | ||||
| @@ -326,6 +364,7 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None | ||||
| #          - "line numbers" return a list of line numbers that match (int list) | ||||
| # | ||||
| # wordlist - list of regex's (str) or words (str) | ||||
| # Preserves all linefeeds and other whitespacing, its not the job of this to remove that | ||||
| def strip_ignore_text(content, wordlist, mode="content"): | ||||
|     i = 0 | ||||
|     output = [] | ||||
| @@ -341,32 +380,30 @@ def strip_ignore_text(content, wordlist, mode="content"): | ||||
|         else: | ||||
|             ignore_text.append(k.strip()) | ||||
|  | ||||
|     for line in content.splitlines(): | ||||
|     for line in content.splitlines(keepends=True): | ||||
|         i += 1 | ||||
|         # Always ignore blank lines in this mode. (when this function gets called) | ||||
|         got_match = False | ||||
|         if len(line.strip()): | ||||
|             for l in ignore_text: | ||||
|                 if l.lower() in line.lower(): | ||||
|         for l in ignore_text: | ||||
|             if l.lower() in line.lower(): | ||||
|                 got_match = True | ||||
|  | ||||
|         if not got_match: | ||||
|             for r in ignore_regex: | ||||
|                 if r.search(line): | ||||
|                     got_match = True | ||||
|  | ||||
|             if not got_match: | ||||
|                 for r in ignore_regex: | ||||
|                     if r.search(line): | ||||
|                         got_match = True | ||||
|  | ||||
|             if not got_match: | ||||
|                 # Not ignored | ||||
|                 output.append(line.encode('utf8')) | ||||
|             else: | ||||
|                 ignored_line_numbers.append(i) | ||||
|  | ||||
|         if not got_match: | ||||
|             # Not ignored, and should preserve "keepends" | ||||
|             output.append(line) | ||||
|         else: | ||||
|             ignored_line_numbers.append(i) | ||||
|  | ||||
|     # Used for finding out what to highlight | ||||
|     if mode == "line numbers": | ||||
|         return ignored_line_numbers | ||||
|  | ||||
|     return "\n".encode('utf8').join(output) | ||||
|     return ''.join(output) | ||||
|  | ||||
| def cdata_in_document_to_text(html_content: str, render_anchor_tag_content=False) -> str: | ||||
|     from xml.sax.saxutils import escape as xml_escape | ||||
|   | ||||
| @@ -52,7 +52,8 @@ class model(dict): | ||||
|                     'schema_version' : 0, | ||||
|                     'shared_diff_access': False, | ||||
|                     'webdriver_delay': None , # Extra delay in seconds before extracting text | ||||
|                     'tags': {} #@todo use Tag.model initialisers | ||||
|                     'tags': {}, #@todo use Tag.model initialisers | ||||
|                     'timezone': None, # Default IANA timezone name | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
| @@ -68,7 +69,7 @@ def parse_headers_from_text_file(filepath): | ||||
|         for l in f.readlines(): | ||||
|             l = l.strip() | ||||
|             if not l.startswith('#') and ':' in l: | ||||
|                 (k, v) = l.split(':') | ||||
|                 (k, v) = l.split(':', 1)  # Split only on the first colon | ||||
|                 headers[k.strip()] = v.strip() | ||||
|  | ||||
|     return headers | ||||
| @@ -6,6 +6,8 @@ import re | ||||
| from pathlib import Path | ||||
| from loguru import logger | ||||
|  | ||||
| from ..html_tools import TRANSLATE_WHITESPACE_TABLE | ||||
|  | ||||
| # Allowable protocols, protects against javascript: etc | ||||
| # file:// is further checked by ALLOW_FILE_URI | ||||
| SAFE_PROTOCOL_REGEX='^(http|https|ftp|file):' | ||||
| @@ -36,8 +38,9 @@ class model(watch_base): | ||||
|     jitter_seconds = 0 | ||||
|  | ||||
|     def __init__(self, *arg, **kw): | ||||
|         self.__datastore_path = kw['datastore_path'] | ||||
|         del kw['datastore_path'] | ||||
|         self.__datastore_path = kw.get('datastore_path') | ||||
|         if kw.get('datastore_path'): | ||||
|             del kw['datastore_path'] | ||||
|         super(model, self).__init__(*arg, **kw) | ||||
|         if kw.get('default'): | ||||
|             self.update(kw['default']) | ||||
| @@ -86,6 +89,10 @@ class model(watch_base): | ||||
|  | ||||
|         if ready_url.startswith('source:'): | ||||
|             ready_url=ready_url.replace('source:', '') | ||||
|  | ||||
|         # Also double check it after any Jinja2 formatting just incase | ||||
|         if not is_safe_url(ready_url): | ||||
|             return 'DISABLED' | ||||
|         return ready_url | ||||
|  | ||||
|     def clear_watch(self): | ||||
| @@ -171,6 +178,10 @@ class model(watch_base): | ||||
|         """ | ||||
|         tmp_history = {} | ||||
|  | ||||
|         # In the case we are only using the watch for processing without history | ||||
|         if not self.watch_data_dir: | ||||
|             return [] | ||||
|  | ||||
|         # Read the history file as a dict | ||||
|         fname = os.path.join(self.watch_data_dir, "history.txt") | ||||
|         if os.path.isfile(fname): | ||||
| @@ -236,37 +247,32 @@ class model(watch_base): | ||||
|         bump = self.history | ||||
|         return self.__newest_history_key | ||||
|  | ||||
|     # Given an arbitrary timestamp, find the closest next key | ||||
|     # For example, last_viewed = 1000 so it should return the next 1001 timestamp | ||||
|     # | ||||
|     # used for the [diff] button so it can preset a smarter from_version | ||||
|     # Given an arbitrary timestamp, find the best history key for the [diff] button so it can preset a smarter from_version | ||||
|     @property | ||||
|     def get_next_snapshot_key_to_last_viewed(self): | ||||
|     def get_from_version_based_on_last_viewed(self): | ||||
|  | ||||
|         """Unfortunately for now timestamp is stored as string key""" | ||||
|         keys = list(self.history.keys()) | ||||
|         if not keys: | ||||
|             return None | ||||
|         if len(keys) == 1: | ||||
|             return keys[0] | ||||
|  | ||||
|         last_viewed = int(self.get('last_viewed')) | ||||
|         prev_k = keys[0] | ||||
|         sorted_keys = sorted(keys, key=lambda x: int(x)) | ||||
|         sorted_keys.reverse() | ||||
|  | ||||
|         # When the 'last viewed' timestamp is greater than the newest snapshot, return second last | ||||
|         if last_viewed > int(sorted_keys[0]): | ||||
|         # When the 'last viewed' timestamp is greater than or equal the newest snapshot, return second newest | ||||
|         if last_viewed >= int(sorted_keys[0]): | ||||
|             return sorted_keys[1] | ||||
|          | ||||
|         # When the 'last viewed' timestamp is between snapshots, return the older snapshot | ||||
|         for newer, older in list(zip(sorted_keys[0:], sorted_keys[1:])): | ||||
|             if last_viewed < int(newer) and last_viewed >= int(older): | ||||
|                 return older | ||||
|  | ||||
|         for k in sorted_keys: | ||||
|             if int(k) < last_viewed: | ||||
|                 if prev_k == sorted_keys[0]: | ||||
|                     # Return the second last one so we dont recommend the same version compares itself | ||||
|                     return sorted_keys[1] | ||||
|  | ||||
|                 return prev_k | ||||
|             prev_k = k | ||||
|  | ||||
|         return keys[0] | ||||
|         # When the 'last viewed' timestamp is less than the oldest snapshot, return oldest | ||||
|         return sorted_keys[-1] | ||||
|  | ||||
|     def get_history_snapshot(self, timestamp): | ||||
|         import brotli | ||||
| @@ -307,13 +313,13 @@ class model(watch_base): | ||||
|             dest = os.path.join(self.watch_data_dir, snapshot_fname) | ||||
|             if not os.path.exists(dest): | ||||
|                 with open(dest, 'wb') as f: | ||||
|                     f.write(brotli.compress(contents, mode=brotli.MODE_TEXT)) | ||||
|                     f.write(brotli.compress(contents.encode('utf-8'), mode=brotli.MODE_TEXT)) | ||||
|         else: | ||||
|             snapshot_fname = f"{snapshot_id}.txt" | ||||
|             dest = os.path.join(self.watch_data_dir, snapshot_fname) | ||||
|             if not os.path.exists(dest): | ||||
|                 with open(dest, 'wb') as f: | ||||
|                     f.write(contents) | ||||
|                     f.write(contents.encode('utf-8')) | ||||
|  | ||||
|         # Append to index | ||||
|         # @todo check last char was \n | ||||
| @@ -328,7 +334,6 @@ class model(watch_base): | ||||
|         # @todo bump static cache of the last timestamp so we dont need to examine the file to set a proper ''viewed'' status | ||||
|         return snapshot_fname | ||||
|  | ||||
|     @property | ||||
|     @property | ||||
|     def has_empty_checktime(self): | ||||
|         # using all() + dictionary comprehension | ||||
| @@ -345,14 +350,32 @@ class model(watch_base): | ||||
|         return seconds | ||||
|  | ||||
|     # Iterate over all history texts and see if something new exists | ||||
|     def lines_contain_something_unique_compared_to_history(self, lines: list): | ||||
|         local_lines = set([l.decode('utf-8').strip().lower() for l in lines]) | ||||
|     # Always applying .strip() to start/end but optionally replace any other whitespace | ||||
|     def lines_contain_something_unique_compared_to_history(self, lines: list, ignore_whitespace=False): | ||||
|         local_lines = [] | ||||
|         if lines: | ||||
|             if ignore_whitespace: | ||||
|                 if isinstance(lines[0], str): # Can be either str or bytes depending on what was on the disk | ||||
|                     local_lines = set([l.translate(TRANSLATE_WHITESPACE_TABLE).lower() for l in lines]) | ||||
|                 else: | ||||
|                     local_lines = set([l.decode('utf-8').translate(TRANSLATE_WHITESPACE_TABLE).lower() for l in lines]) | ||||
|             else: | ||||
|                 if isinstance(lines[0], str): # Can be either str or bytes depending on what was on the disk | ||||
|                     local_lines = set([l.strip().lower() for l in lines]) | ||||
|                 else: | ||||
|                     local_lines = set([l.decode('utf-8').strip().lower() for l in lines]) | ||||
|  | ||||
|  | ||||
|         # Compare each lines (set) against each history text file (set) looking for something new.. | ||||
|         existing_history = set({}) | ||||
|         for k, v in self.history.items(): | ||||
|             content = self.get_history_snapshot(k) | ||||
|             alist = set([line.strip().lower() for line in content.splitlines()]) | ||||
|  | ||||
|             if ignore_whitespace: | ||||
|                 alist = set([line.translate(TRANSLATE_WHITESPACE_TABLE).lower() for line in content.splitlines()]) | ||||
|             else: | ||||
|                 alist = set([line.strip().lower() for line in content.splitlines()]) | ||||
|  | ||||
|             existing_history = existing_history.union(alist) | ||||
|  | ||||
|         # Check that everything in local_lines(new stuff) already exists in existing_history - it should | ||||
| @@ -396,8 +419,8 @@ class model(watch_base): | ||||
|     @property | ||||
|     def watch_data_dir(self): | ||||
|         # The base dir of the watch data | ||||
|         return os.path.join(self.__datastore_path, self['uuid']) | ||||
|      | ||||
|         return os.path.join(self.__datastore_path, self['uuid']) if self.__datastore_path else None | ||||
|  | ||||
|     def get_error_text(self): | ||||
|         """Return the text saved from a previous request that resulted in a non-200 error""" | ||||
|         fname = os.path.join(self.watch_data_dir, "last-error.txt") | ||||
| @@ -509,16 +532,17 @@ class model(watch_base): | ||||
|  | ||||
|     def save_xpath_data(self, data, as_error=False): | ||||
|         import json | ||||
|         import zlib | ||||
|  | ||||
|         if as_error: | ||||
|             target_path = os.path.join(self.watch_data_dir, "elements-error.json") | ||||
|             target_path = os.path.join(str(self.watch_data_dir), "elements-error.deflate") | ||||
|         else: | ||||
|             target_path = os.path.join(self.watch_data_dir, "elements.json") | ||||
|             target_path = os.path.join(str(self.watch_data_dir), "elements.deflate") | ||||
|  | ||||
|         self.ensure_data_dir_exists() | ||||
|  | ||||
|         with open(target_path, 'w') as f: | ||||
|             f.write(json.dumps(data)) | ||||
|         with open(target_path, 'wb') as f: | ||||
|             f.write(zlib.compress(json.dumps(data).encode())) | ||||
|             f.close() | ||||
|  | ||||
|     # Save as PNG, PNG is larger but better for doing visual diff in the future | ||||
|   | ||||
| @@ -59,6 +59,65 @@ class watch_base(dict): | ||||
|             'text_should_not_be_present': [],  # Text that should not present | ||||
|             'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None}, | ||||
|             'time_between_check_use_default': True, | ||||
|             "time_schedule_limit": { | ||||
|                 "enabled": False, | ||||
|                 "monday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|                 "tuesday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|                 "wednesday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|                 "thursday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|                 "friday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|                 "saturday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|                 "sunday": { | ||||
|                     "enabled": True, | ||||
|                     "start_time": "00:00", | ||||
|                     "duration": { | ||||
|                         "hours": "24", | ||||
|                         "minutes": "00" | ||||
|                     } | ||||
|                 }, | ||||
|             }, | ||||
|             'title': None, | ||||
|             'track_ldjson_price_data': None, | ||||
|             'trim_text_whitespace': False, | ||||
|   | ||||
| @@ -23,7 +23,7 @@ valid_tokens = { | ||||
| } | ||||
|  | ||||
| default_notification_format_for_watch = 'System default' | ||||
| default_notification_format = 'Text' | ||||
| default_notification_format = 'HTML Color' | ||||
| default_notification_body = '{{watch_url}} had a change.\n---\n{{diff}}\n---\n' | ||||
| default_notification_title = 'ChangeDetection.io Notification - {{watch_url}}' | ||||
|  | ||||
| @@ -31,6 +31,7 @@ valid_notification_formats = { | ||||
|     'Text': NotifyFormat.TEXT, | ||||
|     'Markdown': NotifyFormat.MARKDOWN, | ||||
|     'HTML': NotifyFormat.HTML, | ||||
|     'HTML Color': 'htmlcolor', | ||||
|     # Used only for editing a watch (not for global) | ||||
|     default_notification_format_for_watch: default_notification_format_for_watch | ||||
| } | ||||
| @@ -66,6 +67,10 @@ def process_notification(n_object, datastore): | ||||
|  | ||||
|     sent_objs = [] | ||||
|     from .apprise_asset import asset | ||||
|  | ||||
|     if 'as_async' in n_object: | ||||
|         asset.async_mode = n_object.get('as_async') | ||||
|  | ||||
|     apobj = apprise.Apprise(debug=True, asset=asset) | ||||
|  | ||||
|     if not n_object.get('notification_urls'): | ||||
| @@ -76,9 +81,16 @@ def process_notification(n_object, datastore): | ||||
|  | ||||
|             # Get the notification body from datastore | ||||
|             n_body = jinja_render(template_str=n_object.get('notification_body', ''), **notification_parameters) | ||||
|             if n_object.get('notification_format', '').startswith('HTML'): | ||||
|                 n_body = n_body.replace("\n", '<br>') | ||||
|  | ||||
|             n_title = jinja_render(template_str=n_object.get('notification_title', ''), **notification_parameters) | ||||
|  | ||||
|             url = url.strip() | ||||
|             if url.startswith('#'): | ||||
|                 logger.trace(f"Skipping commented out notification URL - {url}") | ||||
|                 continue | ||||
|  | ||||
|             if not url: | ||||
|                 logger.warning(f"Process Notification: skipping empty notification URL.") | ||||
|                 continue | ||||
| @@ -149,8 +161,6 @@ def process_notification(n_object, datastore): | ||||
|             attach=n_object.get('screenshot', None) | ||||
|         ) | ||||
|  | ||||
|         # Give apprise time to register an error | ||||
|         time.sleep(3) | ||||
|  | ||||
|         # Returns empty string if nothing found, multi-line string otherwise | ||||
|         log_value = logs.getvalue() | ||||
|   | ||||
| @@ -18,6 +18,7 @@ class difference_detection_processor(): | ||||
|     screenshot = None | ||||
|     watch = None | ||||
|     xpath_data = None | ||||
|     preferred_proxy = None | ||||
|  | ||||
|     def __init__(self, *args, datastore, watch_uuid, **kwargs): | ||||
|         super().__init__(*args, **kwargs) | ||||
| @@ -26,23 +27,24 @@ class difference_detection_processor(): | ||||
|         # Generic fetcher that should be extended (requests, playwright etc) | ||||
|         self.fetcher = Fetcher() | ||||
|  | ||||
|     def call_browser(self): | ||||
|     def call_browser(self, preferred_proxy_id=None): | ||||
|  | ||||
|         from requests.structures import CaseInsensitiveDict | ||||
|  | ||||
|         # Protect against file:// access | ||||
|         if re.search(r'^file://', self.watch.get('url', '').strip(), re.IGNORECASE): | ||||
|         url = self.watch.link | ||||
|  | ||||
|         # Protect against file:, file:/, file:// access, check the real "link" without any meta "source:" etc prepended. | ||||
|         if re.search(r'^file:', url.strip(), re.IGNORECASE): | ||||
|             if not strtobool(os.getenv('ALLOW_FILE_URI', 'false')): | ||||
|                 raise Exception( | ||||
|                     "file:// type access is denied for security reasons." | ||||
|                 ) | ||||
|  | ||||
|         url = self.watch.link | ||||
|  | ||||
|         # Requests, playwright, other browser via wss:// etc, fetch_extra_something | ||||
|         prefer_fetch_backend = self.watch.get('fetch_backend', 'system') | ||||
|  | ||||
|         # Proxy ID "key" | ||||
|         preferred_proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=self.watch.get('uuid')) | ||||
|         preferred_proxy_id = preferred_proxy_id if preferred_proxy_id else self.datastore.get_preferred_proxy_for_watch(uuid=self.watch.get('uuid')) | ||||
|  | ||||
|         # Pluggable content self.fetcher | ||||
|         if not prefer_fetch_backend or prefer_fetch_backend == 'system': | ||||
| @@ -100,6 +102,7 @@ class difference_detection_processor(): | ||||
|             self.fetcher.browser_steps_screenshot_path = os.path.join(self.datastore.datastore_path, self.watch.get('uuid')) | ||||
|  | ||||
|         # Tweak the base config with the per-watch ones | ||||
|         from changedetectionio.safe_jinja import render as jinja_render | ||||
|         request_headers = CaseInsensitiveDict() | ||||
|  | ||||
|         ua = self.datastore.data['settings']['requests'].get('default_ua') | ||||
| @@ -116,9 +119,15 @@ class difference_detection_processor(): | ||||
|         if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']: | ||||
|             request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '') | ||||
|  | ||||
|         for header_name in request_headers: | ||||
|             request_headers.update({header_name: jinja_render(template_str=request_headers.get(header_name))}) | ||||
|  | ||||
|         timeout = self.datastore.data['settings']['requests'].get('timeout') | ||||
|  | ||||
|         request_body = self.watch.get('body') | ||||
|         if request_body: | ||||
|             request_body = jinja_render(template_str=self.watch.get('body')) | ||||
|          | ||||
|         request_method = self.watch.get('method') | ||||
|         ignore_status_codes = self.watch.get('ignore_status_codes', False) | ||||
|  | ||||
| @@ -155,7 +164,7 @@ class difference_detection_processor(): | ||||
|         # After init, call run_changedetection() which will do the actual change-detection | ||||
|  | ||||
|     @abstractmethod | ||||
|     def run_changedetection(self, watch, skip_when_checksum_same: bool = True): | ||||
|     def run_changedetection(self, watch): | ||||
|         update_obj = {'last_notification_error': False, 'last_error': False} | ||||
|         some_data = 'xxxxx' | ||||
|         update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest() | ||||
|   | ||||
| @@ -27,22 +27,27 @@ def _search_prop_by_value(matches, value): | ||||
|                 return prop[1]  # Yield the desired value and exit the function | ||||
|  | ||||
| def _deduplicate_prices(data): | ||||
|     seen = set() | ||||
|     unique_data = [] | ||||
|     import re | ||||
|  | ||||
|     ''' | ||||
|     Some price data has multiple entries, OR it has a single entry with ['$159', '159', 159, "$ 159"] or just "159" | ||||
|     Get all the values, clean it and add it to a set then return the unique values | ||||
|     ''' | ||||
|     unique_data = set() | ||||
|  | ||||
|     # Return the complete 'datum' where its price was not seen before | ||||
|     for datum in data: | ||||
|         # Convert 'value' to float if it can be a numeric string, otherwise leave it as is | ||||
|         try: | ||||
|             normalized_value = float(datum.value) if isinstance(datum.value, str) and datum.value.replace('.', '', 1).isdigit() else datum.value | ||||
|         except ValueError: | ||||
|             normalized_value = datum.value | ||||
|  | ||||
|         # If the normalized value hasn't been seen yet, add it to unique data | ||||
|         if normalized_value not in seen: | ||||
|             unique_data.append(datum) | ||||
|             seen.add(normalized_value) | ||||
|      | ||||
|     return unique_data | ||||
|         if isinstance(datum.value, list): | ||||
|             # Process each item in the list | ||||
|             normalized_value = set([float(re.sub(r'[^\d.]', '', str(item))) for item in datum.value if str(item).strip()]) | ||||
|             unique_data.update(normalized_value) | ||||
|         else: | ||||
|             # Process single value | ||||
|             v = float(re.sub(r'[^\d.]', '', str(datum.value))) | ||||
|             unique_data.add(v) | ||||
|  | ||||
|     return list(unique_data) | ||||
|  | ||||
|  | ||||
| # should return Restock() | ||||
| @@ -83,14 +88,13 @@ def get_itemprop_availability(html_content) -> Restock: | ||||
|         if price_result: | ||||
|             # Right now, we just support single product items, maybe we will store the whole actual metadata seperately in teh future and | ||||
|             # parse that for the UI? | ||||
|             prices_found = set(str(item.value).replace('$', '') for item in price_result) | ||||
|             if len(price_result) > 1 and len(prices_found) > 1: | ||||
|             if len(price_result) > 1 and len(price_result) > 1: | ||||
|                 # See of all prices are different, in the case that one product has many embedded data types with the same price | ||||
|                 # One might have $121.95 and another 121.95 etc | ||||
|                 logger.warning(f"More than one price found {prices_found}, throwing exception, cant use this plugin.") | ||||
|                 logger.warning(f"More than one price found {price_result}, throwing exception, cant use this plugin.") | ||||
|                 raise MoreThanOnePriceFound() | ||||
|  | ||||
|             value['price'] = price_result[0].value | ||||
|             value['price'] = price_result[0] | ||||
|  | ||||
|         pricecurrency_result = pricecurrency_parse.find(data) | ||||
|         if pricecurrency_result: | ||||
| @@ -140,7 +144,7 @@ class perform_site_check(difference_detection_processor): | ||||
|     screenshot = None | ||||
|     xpath_data = None | ||||
|  | ||||
|     def run_changedetection(self, watch, skip_when_checksum_same=True): | ||||
|     def run_changedetection(self, watch): | ||||
|         import hashlib | ||||
|  | ||||
|         if not watch: | ||||
| @@ -220,7 +224,7 @@ class perform_site_check(difference_detection_processor): | ||||
|             itemprop_availability['original_price'] = itemprop_availability.get('price') | ||||
|             update_obj['restock']["original_price"] = itemprop_availability.get('price') | ||||
|  | ||||
|         if not self.fetcher.instock_data and not itemprop_availability.get('availability'): | ||||
|         if not self.fetcher.instock_data and not itemprop_availability.get('availability') and not itemprop_availability.get('price'): | ||||
|             raise ProcessorException( | ||||
|                 message=f"Unable to extract restock data for this page unfortunately. (Got code {self.fetcher.get_last_status_code()} from server), no embedded stock information was found and nothing interesting in the text, try using this watch with Chrome.", | ||||
|                 url=watch.get('url'), | ||||
| @@ -237,6 +241,14 @@ class perform_site_check(difference_detection_processor): | ||||
|             update_obj['restock']["in_stock"] = True if self.fetcher.instock_data == 'Possibly in stock' else False | ||||
|             logger.debug(f"Watch UUID {watch.get('uuid')} restock check returned instock_data - '{self.fetcher.instock_data}' from JS scraper.") | ||||
|  | ||||
|         # Very often websites will lie about the 'availability' in the metadata, so if the scraped version says its NOT in stock, use that. | ||||
|         if self.fetcher.instock_data and self.fetcher.instock_data != 'Possibly in stock': | ||||
|             if update_obj['restock'].get('in_stock'): | ||||
|                 logger.warning( | ||||
|                     f"Lie detected in the availability machine data!! when scraping said its not in stock!! itemprop was '{itemprop_availability}' and scraped from browser was '{self.fetcher.instock_data}' update obj was {update_obj['restock']} ") | ||||
|                 logger.warning(f"Setting instock to FALSE, scraper found '{self.fetcher.instock_data}' in the body but metadata reported not-in-stock") | ||||
|                 update_obj['restock']["in_stock"] = False | ||||
|  | ||||
|         # What we store in the snapshot | ||||
|         price = update_obj.get('restock').get('price') if update_obj.get('restock').get('price') else "" | ||||
|         snapshot_content = f"In Stock: {update_obj.get('restock').get('in_stock')} - Price: {price}" | ||||
| @@ -299,4 +311,4 @@ class perform_site_check(difference_detection_processor): | ||||
|         # Always record the new checksum | ||||
|         update_obj["previous_md5"] = fetched_md5 | ||||
|  | ||||
|         return changed_detected, update_obj, snapshot_content.encode('utf-8').strip() | ||||
|         return changed_detected, update_obj, snapshot_content.strip() | ||||
|   | ||||
| @@ -0,0 +1,115 @@ | ||||
|  | ||||
| from loguru import logger | ||||
|  | ||||
|  | ||||
|  | ||||
| def _task(watch, update_handler): | ||||
|     from changedetectionio.content_fetchers.exceptions import ReplyWithContentButNoText | ||||
|     from changedetectionio.processors.text_json_diff.processor import FilterNotFoundInResponse | ||||
|  | ||||
|     text_after_filter = '' | ||||
|  | ||||
|     try: | ||||
|         # The slow process (we run 2 of these in parallel) | ||||
|         changed_detected, update_obj, text_after_filter = update_handler.run_changedetection(watch=watch) | ||||
|     except FilterNotFoundInResponse as e: | ||||
|         text_after_filter = f"Filter not found in HTML: {str(e)}" | ||||
|     except ReplyWithContentButNoText as e: | ||||
|         text_after_filter = f"Filter found but no text (empty result)" | ||||
|     except Exception as e: | ||||
|         text_after_filter = f"Error: {str(e)}" | ||||
|  | ||||
|     if not text_after_filter.strip(): | ||||
|         text_after_filter = 'Empty content' | ||||
|  | ||||
|     # because run_changedetection always returns bytes due to saving the snapshots etc | ||||
|     text_after_filter = text_after_filter.decode('utf-8') if isinstance(text_after_filter, bytes) else text_after_filter | ||||
|  | ||||
|     return text_after_filter | ||||
|  | ||||
|  | ||||
| def prepare_filter_prevew(datastore, watch_uuid): | ||||
|     '''Used by @app.route("/edit/<string:uuid>/preview-rendered", methods=['POST'])''' | ||||
|     from changedetectionio import forms, html_tools | ||||
|     from changedetectionio.model.Watch import model as watch_model | ||||
|     from concurrent.futures import ProcessPoolExecutor | ||||
|     from copy import deepcopy | ||||
|     from flask import request, jsonify | ||||
|     import brotli | ||||
|     import importlib | ||||
|     import os | ||||
|     import time | ||||
|     now = time.time() | ||||
|  | ||||
|     text_after_filter = '' | ||||
|     text_before_filter = '' | ||||
|     trigger_line_numbers = [] | ||||
|     ignore_line_numbers = [] | ||||
|  | ||||
|     tmp_watch = deepcopy(datastore.data['watching'].get(watch_uuid)) | ||||
|  | ||||
|     if tmp_watch and tmp_watch.history and os.path.isdir(tmp_watch.watch_data_dir): | ||||
|         # Splice in the temporary stuff from the form | ||||
|         form = forms.processor_text_json_diff_form(formdata=request.form if request.method == 'POST' else None, | ||||
|                                                    data=request.form | ||||
|                                                    ) | ||||
|  | ||||
|         # Only update vars that came in via the AJAX post | ||||
|         p = {k: v for k, v in form.data.items() if k in request.form.keys()} | ||||
|         tmp_watch.update(p) | ||||
|         blank_watch_no_filters = watch_model() | ||||
|         blank_watch_no_filters['url'] = tmp_watch.get('url') | ||||
|  | ||||
|         latest_filename = next(reversed(tmp_watch.history)) | ||||
|         html_fname = os.path.join(tmp_watch.watch_data_dir, f"{latest_filename}.html.br") | ||||
|         with open(html_fname, 'rb') as f: | ||||
|             decompressed_data = brotli.decompress(f.read()).decode('utf-8') if html_fname.endswith('.br') else f.read().decode('utf-8') | ||||
|  | ||||
|             # Just like a normal change detection except provide a fake "watch" object and dont call .call_browser() | ||||
|             processor_module = importlib.import_module("changedetectionio.processors.text_json_diff.processor") | ||||
|             update_handler = processor_module.perform_site_check(datastore=datastore, | ||||
|                                                                  watch_uuid=tmp_watch.get('uuid')  # probably not needed anymore anyway? | ||||
|                                                                  ) | ||||
|             # Use the last loaded HTML as the input | ||||
|             update_handler.datastore = datastore | ||||
|             update_handler.fetcher.content = str(decompressed_data) # str() because playwright/puppeteer/requests return string | ||||
|             update_handler.fetcher.headers['content-type'] = tmp_watch.get('content-type') | ||||
|  | ||||
|             # Process our watch with filters and the HTML from disk, and also a blank watch with no filters but also with the same HTML from disk | ||||
|             # Do this as a parallel process because it could take some time | ||||
|             with ProcessPoolExecutor(max_workers=2) as executor: | ||||
|                 future1 = executor.submit(_task, tmp_watch, update_handler) | ||||
|                 future2 = executor.submit(_task, blank_watch_no_filters, update_handler) | ||||
|  | ||||
|                 text_after_filter = future1.result() | ||||
|                 text_before_filter = future2.result() | ||||
|  | ||||
|     try: | ||||
|         trigger_line_numbers = html_tools.strip_ignore_text(content=text_after_filter, | ||||
|                                                             wordlist=tmp_watch['trigger_text'], | ||||
|                                                             mode='line numbers' | ||||
|                                                             ) | ||||
|     except Exception as e: | ||||
|         text_before_filter = f"Error: {str(e)}" | ||||
|  | ||||
|     try: | ||||
|         text_to_ignore = tmp_watch.get('ignore_text', []) + datastore.data['settings']['application'].get('global_ignore_text', []) | ||||
|         ignore_line_numbers = html_tools.strip_ignore_text(content=text_after_filter, | ||||
|                                                            wordlist=text_to_ignore, | ||||
|                                                            mode='line numbers' | ||||
|                                                            ) | ||||
|     except Exception as e: | ||||
|         text_before_filter = f"Error: {str(e)}" | ||||
|  | ||||
|     logger.trace(f"Parsed in {time.time() - now:.3f}s") | ||||
|  | ||||
|     return jsonify( | ||||
|         { | ||||
|             'after_filter': text_after_filter, | ||||
|             'before_filter': text_before_filter.decode('utf-8') if isinstance(text_before_filter, bytes) else text_before_filter, | ||||
|             'duration': time.time() - now, | ||||
|             'trigger_line_numbers': trigger_line_numbers, | ||||
|             'ignore_line_numbers': ignore_line_numbers, | ||||
|         } | ||||
|     ) | ||||
|  | ||||
|   | ||||
| @@ -7,7 +7,7 @@ import re | ||||
| import urllib3 | ||||
|  | ||||
| from changedetectionio.processors import difference_detection_processor | ||||
| from changedetectionio.html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text | ||||
| from changedetectionio.html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text, TRANSLATE_WHITESPACE_TABLE | ||||
| from changedetectionio import html_tools, content_fetchers | ||||
| from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT | ||||
| from loguru import logger | ||||
| @@ -35,8 +35,7 @@ class PDFToHTMLToolNotFound(ValueError): | ||||
| # (set_proxy_from_list) | ||||
| class perform_site_check(difference_detection_processor): | ||||
|  | ||||
|     def run_changedetection(self, watch, skip_when_checksum_same=True): | ||||
|  | ||||
|     def run_changedetection(self, watch): | ||||
|         changed_detected = False | ||||
|         html_content = "" | ||||
|         screenshot = False  # as bytes | ||||
| @@ -59,9 +58,6 @@ class perform_site_check(difference_detection_processor): | ||||
|         # Watches added automatically in the queue manager will skip if its the same checksum as the previous run | ||||
|         # Saves a lot of CPU | ||||
|         update_obj['previous_md5_before_filters'] = hashlib.md5(self.fetcher.content.encode('utf-8')).hexdigest() | ||||
|         if skip_when_checksum_same: | ||||
|             if update_obj['previous_md5_before_filters'] == watch.get('previous_md5_before_filters'): | ||||
|                 raise content_fetchers.exceptions.checksumFromPreviousCheckWasTheSame() | ||||
|  | ||||
|         # Fetching complete, now filters | ||||
|  | ||||
| @@ -205,22 +201,14 @@ class perform_site_check(difference_detection_processor): | ||||
|         if watch.get('trim_text_whitespace'): | ||||
|             stripped_text_from_html = '\n'.join(line.strip() for line in stripped_text_from_html.replace("\n\n", "\n").splitlines()) | ||||
|  | ||||
|         if watch.get('remove_duplicate_lines'): | ||||
|             stripped_text_from_html = '\n'.join(dict.fromkeys(line for line in stripped_text_from_html.replace("\n\n", "\n").splitlines())) | ||||
|  | ||||
|         if watch.get('sort_text_alphabetically'): | ||||
|             # Note: Because a <p>something</p> will add an extra line feed to signify the paragraph gap | ||||
|             # we end up with 'Some text\n\n', sorting will add all those extra \n at the start, so we remove them here. | ||||
|             stripped_text_from_html = stripped_text_from_html.replace("\n\n", "\n") | ||||
|             stripped_text_from_html = '\n'.join(sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower())) | ||||
|  | ||||
|         # Re #340 - return the content before the 'ignore text' was applied | ||||
|         # Also used to calculate/show what was removed | ||||
|         text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8') | ||||
|         text_content_before_ignored_filter = stripped_text_from_html | ||||
|  | ||||
|         # @todo whitespace coming from missing rtrim()? | ||||
|         # stripped_text_from_html could be based on their preferences, replace the processed text with only that which they want to know about. | ||||
|         # Rewrite's the processing text based on only what diff result they want to see | ||||
|  | ||||
|         if watch.has_special_diff_filter_options_set() and len(watch.history.keys()): | ||||
|             # Now the content comes from the diff-parser and not the returned HTTP traffic, so could be some differences | ||||
|             from changedetectionio import diff | ||||
| @@ -235,12 +223,12 @@ class perform_site_check(difference_detection_processor): | ||||
|                                              line_feed_sep="\n", | ||||
|                                              include_change_type_prefix=False) | ||||
|  | ||||
|             watch.save_last_text_fetched_before_filters(text_content_before_ignored_filter) | ||||
|             watch.save_last_text_fetched_before_filters(text_content_before_ignored_filter.encode('utf-8')) | ||||
|  | ||||
|             if not rendered_diff and stripped_text_from_html: | ||||
|                 # We had some content, but no differences were found | ||||
|                 # Store our new file as the MD5 so it will trigger in the future | ||||
|                 c = hashlib.md5(stripped_text_from_html.encode('utf-8').translate(None, b'\r\n\t ')).hexdigest() | ||||
|                 c = hashlib.md5(stripped_text_from_html.translate(TRANSLATE_WHITESPACE_TABLE).encode('utf-8')).hexdigest() | ||||
|                 return False, {'previous_md5': c}, stripped_text_from_html.encode('utf-8') | ||||
|             else: | ||||
|                 stripped_text_from_html = rendered_diff | ||||
| @@ -261,14 +249,6 @@ class perform_site_check(difference_detection_processor): | ||||
|  | ||||
|         update_obj["last_check_status"] = self.fetcher.get_last_status_code() | ||||
|  | ||||
|         # If there's text to skip | ||||
|         # @todo we could abstract out the get_text() to handle this cleaner | ||||
|         text_to_ignore = watch.get('ignore_text', []) + self.datastore.data['settings']['application'].get('global_ignore_text', []) | ||||
|         if len(text_to_ignore): | ||||
|             stripped_text_from_html = html_tools.strip_ignore_text(stripped_text_from_html, text_to_ignore) | ||||
|         else: | ||||
|             stripped_text_from_html = stripped_text_from_html.encode('utf8') | ||||
|  | ||||
|         # 615 Extract text by regex | ||||
|         extract_text = watch.get('extract_text', []) | ||||
|         if len(extract_text) > 0: | ||||
| @@ -277,39 +257,53 @@ class perform_site_check(difference_detection_processor): | ||||
|                 # incase they specified something in '/.../x' | ||||
|                 if re.search(PERL_STYLE_REGEX, s_re, re.IGNORECASE): | ||||
|                     regex = html_tools.perl_style_slash_enclosed_regex_to_options(s_re) | ||||
|                     result = re.findall(regex.encode('utf-8'), stripped_text_from_html) | ||||
|                     result = re.findall(regex, stripped_text_from_html) | ||||
|  | ||||
|                     for l in result: | ||||
|                         if type(l) is tuple: | ||||
|                             # @todo - some formatter option default (between groups) | ||||
|                             regex_matched_output += list(l) + [b'\n'] | ||||
|                             regex_matched_output += list(l) + ['\n'] | ||||
|                         else: | ||||
|                             # @todo - some formatter option default (between each ungrouped result) | ||||
|                             regex_matched_output += [l] + [b'\n'] | ||||
|                             regex_matched_output += [l] + ['\n'] | ||||
|                 else: | ||||
|                     # Doesnt look like regex, just hunt for plaintext and return that which matches | ||||
|                     # `stripped_text_from_html` will be bytes, so we must encode s_re also to bytes | ||||
|                     r = re.compile(re.escape(s_re.encode('utf-8')), re.IGNORECASE) | ||||
|                     r = re.compile(re.escape(s_re), re.IGNORECASE) | ||||
|                     res = r.findall(stripped_text_from_html) | ||||
|                     if res: | ||||
|                         for match in res: | ||||
|                             regex_matched_output += [match] + [b'\n'] | ||||
|                             regex_matched_output += [match] + ['\n'] | ||||
|  | ||||
|             ########################################################## | ||||
|             stripped_text_from_html = b'' | ||||
|             text_content_before_ignored_filter = b'' | ||||
|             stripped_text_from_html = '' | ||||
|  | ||||
|             if regex_matched_output: | ||||
|                 # @todo some formatter for presentation? | ||||
|                 stripped_text_from_html = b''.join(regex_matched_output) | ||||
|                 text_content_before_ignored_filter = stripped_text_from_html | ||||
|                 stripped_text_from_html = ''.join(regex_matched_output) | ||||
|  | ||||
|         if watch.get('remove_duplicate_lines'): | ||||
|             stripped_text_from_html = '\n'.join(dict.fromkeys(line for line in stripped_text_from_html.replace("\n\n", "\n").splitlines())) | ||||
|  | ||||
|  | ||||
|         if watch.get('sort_text_alphabetically'): | ||||
|             # Note: Because a <p>something</p> will add an extra line feed to signify the paragraph gap | ||||
|             # we end up with 'Some text\n\n', sorting will add all those extra \n at the start, so we remove them here. | ||||
|             stripped_text_from_html = stripped_text_from_html.replace("\n\n", "\n") | ||||
|             stripped_text_from_html = '\n'.join(sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower())) | ||||
|  | ||||
| ### CALCULATE MD5 | ||||
|         # If there's text to ignore | ||||
|         text_to_ignore = watch.get('ignore_text', []) + self.datastore.data['settings']['application'].get('global_ignore_text', []) | ||||
|         text_for_checksuming = stripped_text_from_html | ||||
|         if text_to_ignore: | ||||
|             text_for_checksuming = html_tools.strip_ignore_text(stripped_text_from_html, text_to_ignore) | ||||
|  | ||||
|         # Re #133 - if we should strip whitespaces from triggering the change detected comparison | ||||
|         if self.datastore.data['settings']['application'].get('ignore_whitespace', False): | ||||
|             fetched_md5 = hashlib.md5(stripped_text_from_html.translate(None, b'\r\n\t ')).hexdigest() | ||||
|         if text_for_checksuming and self.datastore.data['settings']['application'].get('ignore_whitespace', False): | ||||
|             fetched_md5 = hashlib.md5(text_for_checksuming.translate(TRANSLATE_WHITESPACE_TABLE).encode('utf-8')).hexdigest() | ||||
|         else: | ||||
|             fetched_md5 = hashlib.md5(stripped_text_from_html).hexdigest() | ||||
|             fetched_md5 = hashlib.md5(text_for_checksuming.encode('utf-8')).hexdigest() | ||||
|  | ||||
|         ############ Blocking rules, after checksum ################# | ||||
|         blocked = False | ||||
| @@ -337,19 +331,33 @@ class perform_site_check(difference_detection_processor): | ||||
|             if result: | ||||
|                 blocked = True | ||||
|  | ||||
|         # The main thing that all this at the moment comes down to :) | ||||
|         if watch.get('previous_md5') != fetched_md5: | ||||
|             changed_detected = True | ||||
|  | ||||
|         # Looks like something changed, but did it match all the rules? | ||||
|         if blocked: | ||||
|             changed_detected = False | ||||
|         else: | ||||
|             # The main thing that all this at the moment comes down to :) | ||||
|             if watch.get('previous_md5') != fetched_md5: | ||||
|                 changed_detected = True | ||||
|  | ||||
|             # Always record the new checksum | ||||
|             update_obj["previous_md5"] = fetched_md5 | ||||
|  | ||||
|             # On the first run of a site, watch['previous_md5'] will be None, set it the current one. | ||||
|             if not watch.get('previous_md5'): | ||||
|                 watch['previous_md5'] = fetched_md5 | ||||
|  | ||||
|         logger.debug(f"Watch UUID {watch.get('uuid')} content check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}") | ||||
|  | ||||
|         if changed_detected: | ||||
|             if watch.get('check_unique_lines', False): | ||||
|                 has_unique_lines = watch.lines_contain_something_unique_compared_to_history(lines=stripped_text_from_html.splitlines()) | ||||
|                 ignore_whitespace = self.datastore.data['settings']['application'].get('ignore_whitespace') | ||||
|  | ||||
|                 has_unique_lines = watch.lines_contain_something_unique_compared_to_history( | ||||
|                     lines=stripped_text_from_html.splitlines(), | ||||
|                     ignore_whitespace=ignore_whitespace | ||||
|                 ) | ||||
|  | ||||
|                 # One or more lines? unsure? | ||||
|                 if not has_unique_lines: | ||||
|                     logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} didnt have anything new setting change_detected=False") | ||||
| @@ -357,12 +365,6 @@ class perform_site_check(difference_detection_processor): | ||||
|                 else: | ||||
|                     logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} had unique content") | ||||
|  | ||||
|         # Always record the new checksum | ||||
|         update_obj["previous_md5"] = fetched_md5 | ||||
|  | ||||
|         # On the first run of a site, watch['previous_md5'] will be None, set it the current one. | ||||
|         if not watch.get('previous_md5'): | ||||
|             watch['previous_md5'] = fetched_md5 | ||||
|  | ||||
|         # stripped_text_from_html - Everything after filters and NO 'ignored' content | ||||
|         return changed_detected, update_obj, stripped_text_from_html | ||||
|   | ||||
							
								
								
									
										225
									
								
								changedetectionio/static/images/schedule.svg
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										225
									
								
								changedetectionio/static/images/schedule.svg
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,225 @@ | ||||
| <?xml version="1.0" encoding="UTF-8" standalone="no"?> | ||||
| <svg | ||||
|    version="1.1" | ||||
|    id="Layer_1" | ||||
|    x="0px" | ||||
|    y="0px" | ||||
|    viewBox="0 0 661.20001 665.40002" | ||||
|    xml:space="preserve" | ||||
|    width="661.20001" | ||||
|    height="665.40002" | ||||
|    inkscape:version="1.1.2 (0a00cf5339, 2022-02-04)" | ||||
|    sodipodi:docname="schedule.svg" | ||||
|    xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | ||||
|    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | ||||
|    xmlns="http://www.w3.org/2000/svg" | ||||
|    xmlns:svg="http://www.w3.org/2000/svg"><defs | ||||
|    id="defs77" /><sodipodi:namedview | ||||
|    id="namedview75" | ||||
|    pagecolor="#ffffff" | ||||
|    bordercolor="#666666" | ||||
|    borderopacity="1.0" | ||||
|    inkscape:pageshadow="2" | ||||
|    inkscape:pageopacity="0.0" | ||||
|    inkscape:pagecheckerboard="0" | ||||
|    showgrid="false" | ||||
|    fit-margin-top="0" | ||||
|    fit-margin-left="0" | ||||
|    fit-margin-right="0" | ||||
|    fit-margin-bottom="0" | ||||
|    inkscape:zoom="1.2458671" | ||||
|    inkscape:cx="300.59386" | ||||
|    inkscape:cy="332.29869" | ||||
|    inkscape:window-width="1920" | ||||
|    inkscape:window-height="1051" | ||||
|    inkscape:window-x="1920" | ||||
|    inkscape:window-y="0" | ||||
|    inkscape:window-maximized="1" | ||||
|    inkscape:current-layer="g72" /> <style | ||||
|    type="text/css" | ||||
|    id="style2"> .st0{fill:#FFFFFF;} .st1{fill:#C1272D;} .st2{fill:#991D26;} .st3{fill:#CCCCCC;} .st4{fill:#E6E6E6;} .st5{fill:#F7931E;} .st6{fill:#F2F2F2;} .st7{fill:none;stroke:#999999;stroke-width:17.9763;stroke-linecap:round;stroke-miterlimit:10;} .st8{fill:none;stroke:#333333;stroke-width:8.9882;stroke-linecap:round;stroke-miterlimit:10;} .st9{fill:none;stroke:#C1272D;stroke-width:5.9921;stroke-linecap:round;stroke-miterlimit:10;} .st10{fill:#245F7F;} </style> <g | ||||
|    id="g72" | ||||
|    transform="translate(-149.4,-147.3)"> <path | ||||
|    class="st0" | ||||
|    d="M 601.2,699.8 H 205 c -30.7,0 -55.6,-24.9 -55.6,-55.6 V 248 c 0,-30.7 24.9,-55.6 55.6,-55.6 h 396.2 c 30.7,0 55.6,24.9 55.6,55.6 v 396.2 c 0,30.7 -24.9,55.6 -55.6,55.6 z" | ||||
|    id="path4" | ||||
|    style="fill:#dfdfdf;fill-opacity:1" /> <path | ||||
|    class="st1" | ||||
|    d="M 601.2,192.4 H 205 c -30.7,0 -55.6,24.9 -55.6,55.6 v 88.5 H 656.8 V 248 c 0,-30.7 -24.9,-55.6 -55.6,-55.6 z" | ||||
|    id="path6" | ||||
|    style="fill:#d62128;fill-opacity:1" /> <circle | ||||
|    class="st2" | ||||
|    cx="253.3" | ||||
|    cy="264.5" | ||||
|    r="36.700001" | ||||
|    id="circle8" /> <circle | ||||
|    class="st2" | ||||
|    cx="551.59998" | ||||
|    cy="264.5" | ||||
|    r="36.700001" | ||||
|    id="circle10" /> <path | ||||
|    class="st3" | ||||
|    d="m 253.3,275.7 v 0 c -11.8,0 -21.3,-9.6 -21.3,-21.3 v -85.8 c 0,-11.8 9.6,-21.3 21.3,-21.3 v 0 c 11.8,0 21.3,9.6 21.3,21.3 v 85.8 c 0,11.8 -9.5,21.3 -21.3,21.3 z" | ||||
|    id="path12" /> <path | ||||
|    class="st3" | ||||
|    d="m 551.6,275.7 v 0 c -11.8,0 -21.3,-9.6 -21.3,-21.3 v -85.8 c 0,-11.8 9.6,-21.3 21.3,-21.3 v 0 c 11.8,0 21.3,9.6 21.3,21.3 v 85.8 c 0.1,11.8 -9.5,21.3 -21.3,21.3 z" | ||||
|    id="path14" /> <rect | ||||
|    x="215.7" | ||||
|    y="370.89999" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect16" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="313" | ||||
|    y="370.89999" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect18" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="410.20001" | ||||
|    y="370.89999" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect20" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="507.5" | ||||
|    y="370.89999" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect22" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="215.7" | ||||
|    y="465" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect24" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="313" | ||||
|    y="465" | ||||
|    class="st1" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect26" | ||||
|    style="fill:#27c12b;fill-opacity:1" /> <rect | ||||
|    x="410.20001" | ||||
|    y="465" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect28" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="507.5" | ||||
|    y="465" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect30" /> <rect | ||||
|    x="215.7" | ||||
|    y="559.09998" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect32" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="313" | ||||
|    y="559.09998" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect34" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="410.20001" | ||||
|    y="559.09998" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect36" | ||||
|    style="fill:#ffffff;fill-opacity:1" /> <rect | ||||
|    x="507.5" | ||||
|    y="559.09998" | ||||
|    class="st4" | ||||
|    width="75.199997" | ||||
|    height="75.199997" | ||||
|    id="rect38" /> <g | ||||
|    id="g70"> <circle | ||||
|    class="st5" | ||||
|    cx="621.90002" | ||||
|    cy="624" | ||||
|    r="188.7" | ||||
|    id="circle40" /> <circle | ||||
|    class="st0" | ||||
|    cx="621.90002" | ||||
|    cy="624" | ||||
|    r="148" | ||||
|    id="circle42" /> <path | ||||
|    class="st6" | ||||
|    d="m 486.6,636.8 c 0,-81.7 66.3,-148 148,-148 37.6,0 72,14.1 98.1,37.2 -27.1,-30.6 -66.7,-49.9 -110.8,-49.9 -81.7,0 -148,66.3 -148,148 0,44.1 19.3,83.7 49.9,110.8 -23.1,-26.2 -37.2,-60.5 -37.2,-98.1 z" | ||||
|    id="path44" /> <polyline | ||||
|    class="st7" | ||||
|    points="621.9,530.4 621.9,624 559,624  " | ||||
|    id="polyline46" /> <g | ||||
|    id="g64"> <line | ||||
|    class="st8" | ||||
|    x1="621.90002" | ||||
|    y1="508.29999" | ||||
|    x2="621.90002" | ||||
|    y2="497.10001" | ||||
|    id="line48" /> <line | ||||
|    class="st8" | ||||
|    x1="621.90002" | ||||
|    y1="756.29999" | ||||
|    x2="621.90002" | ||||
|    y2="745.09998" | ||||
|    id="line50" /> <line | ||||
|    class="st8" | ||||
|    x1="740.29999" | ||||
|    y1="626.70001" | ||||
|    x2="751.5" | ||||
|    y2="626.70001" | ||||
|    id="line52" /> <line | ||||
|    class="st8" | ||||
|    x1="492.29999" | ||||
|    y1="626.70001" | ||||
|    x2="503.5" | ||||
|    y2="626.70001" | ||||
|    id="line54" /> <line | ||||
|    class="st8" | ||||
|    x1="705.59998" | ||||
|    y1="710.40002" | ||||
|    x2="713.5" | ||||
|    y2="718.29999" | ||||
|    id="line56" /> <line | ||||
|    class="st8" | ||||
|    x1="530.29999" | ||||
|    y1="535.09998" | ||||
|    x2="538.20001" | ||||
|    y2="543" | ||||
|    id="line58" /> <line | ||||
|    class="st8" | ||||
|    x1="538.20001" | ||||
|    y1="710.40002" | ||||
|    x2="530.29999" | ||||
|    y2="718.29999" | ||||
|    id="line60" /> <line | ||||
|    class="st8" | ||||
|    x1="713.5" | ||||
|    y1="535.09998" | ||||
|    x2="705.59998" | ||||
|    y2="543" | ||||
|    id="line62" /> </g> <line | ||||
|    class="st9" | ||||
|    x1="604.40002" | ||||
|    y1="606.29999" | ||||
|    x2="684.5" | ||||
|    y2="687.40002" | ||||
|    id="line66" /> <circle | ||||
|    class="st10" | ||||
|    cx="621.90002" | ||||
|    cy="624" | ||||
|    r="16.1" | ||||
|    id="circle68" /> </g> </g> </svg> | ||||
| After Width: | Height: | Size: 5.9 KiB | 
| @@ -24,5 +24,19 @@ $(document).ready(function () { | ||||
|         $(target).toggle(); | ||||
|     }); | ||||
|  | ||||
|     // Time zone config related | ||||
|     $(".local-time").each(function (e) { | ||||
|         $(this).text(new Date($(this).data("utc")).toLocaleString()); | ||||
|     }) | ||||
|  | ||||
|     const timezoneInput = $('#application-timezone'); | ||||
|     if(timezoneInput.length) { | ||||
|         const timezone = Intl.DateTimeFormat().resolvedOptions().timeZone; | ||||
|         if (!timezoneInput.val().trim()) { | ||||
|             timezoneInput.val(timezone); | ||||
|             timezoneInput.after('<div class="timezone-message">The timezone was set from your browser, <strong>be sure to press save!</strong></div>'); | ||||
|         } | ||||
|     } | ||||
|  | ||||
| }); | ||||
|  | ||||
|   | ||||
| @@ -1,56 +0,0 @@ | ||||
| /** | ||||
|  * debounce | ||||
|  * @param {integer} milliseconds This param indicates the number of milliseconds | ||||
|  *     to wait after the last call before calling the original function. | ||||
|  * @param {object} What "this" refers to in the returned function. | ||||
|  * @return {function} This returns a function that when called will wait the | ||||
|  *     indicated number of milliseconds after the last call before | ||||
|  *     calling the original function. | ||||
|  */ | ||||
| Function.prototype.debounce = function (milliseconds, context) { | ||||
|     var baseFunction = this, | ||||
|         timer = null, | ||||
|         wait = milliseconds; | ||||
|  | ||||
|     return function () { | ||||
|         var self = context || this, | ||||
|             args = arguments; | ||||
|  | ||||
|         function complete() { | ||||
|             baseFunction.apply(self, args); | ||||
|             timer = null; | ||||
|         } | ||||
|  | ||||
|         if (timer) { | ||||
|             clearTimeout(timer); | ||||
|         } | ||||
|  | ||||
|         timer = setTimeout(complete, wait); | ||||
|     }; | ||||
| }; | ||||
|  | ||||
| /** | ||||
| * throttle | ||||
| * @param {integer} milliseconds This param indicates the number of milliseconds | ||||
| *     to wait between calls before calling the original function. | ||||
| * @param {object} What "this" refers to in the returned function. | ||||
| * @return {function} This returns a function that when called will wait the | ||||
| *     indicated number of milliseconds between calls before | ||||
| *     calling the original function. | ||||
| */ | ||||
| Function.prototype.throttle = function (milliseconds, context) { | ||||
|     var baseFunction = this, | ||||
|         lastEventTimestamp = null, | ||||
|         limit = milliseconds; | ||||
|  | ||||
|     return function () { | ||||
|         var self = context || this, | ||||
|             args = arguments, | ||||
|             now = Date.now(); | ||||
|  | ||||
|         if (!lastEventTimestamp || now - lastEventTimestamp >= limit) { | ||||
|             lastEventTimestamp = now; | ||||
|             baseFunction.apply(self, args); | ||||
|         } | ||||
|     }; | ||||
| }; | ||||
| @@ -1,45 +1,52 @@ | ||||
| $(document).ready(function() { | ||||
| $(document).ready(function () { | ||||
|  | ||||
|   $('#add-email-helper').click(function (e) { | ||||
|     e.preventDefault(); | ||||
|     email = prompt("Destination email"); | ||||
|     if(email) { | ||||
|       var n = $(".notification-urls"); | ||||
|       var p=email_notification_prefix; | ||||
|       $(n).val( $.trim( $(n).val() )+"\n"+email_notification_prefix+email ); | ||||
|     } | ||||
|   }); | ||||
|  | ||||
|   $('#send-test-notification').click(function (e) { | ||||
|     e.preventDefault(); | ||||
|  | ||||
|     data = { | ||||
|       notification_body: $('#notification_body').val(), | ||||
|       notification_format: $('#notification_format').val(), | ||||
|       notification_title: $('#notification_title').val(), | ||||
|       notification_urls: $('.notification-urls').val(), | ||||
|       tags: $('#tags').val(), | ||||
|       window_url: window.location.href, | ||||
|     } | ||||
|  | ||||
|  | ||||
|     $.ajax({ | ||||
|       type: "POST", | ||||
|       url: notification_base_url, | ||||
|       data : data, | ||||
|         statusCode: { | ||||
|         400: function() { | ||||
|             // More than likely the CSRF token was lost when the server restarted | ||||
|           alert("There was a problem processing the request, please reload the page."); | ||||
|     $('#add-email-helper').click(function (e) { | ||||
|         e.preventDefault(); | ||||
|         email = prompt("Destination email"); | ||||
|         if (email) { | ||||
|             var n = $(".notification-urls"); | ||||
|             var p = email_notification_prefix; | ||||
|             $(n).val($.trim($(n).val()) + "\n" + email_notification_prefix + email); | ||||
|         } | ||||
|       } | ||||
|     }).done(function(data){ | ||||
|       console.log(data); | ||||
|       alert(data); | ||||
|     }).fail(function(data){ | ||||
|       console.log(data); | ||||
|       alert('There was an error communicating with the server.'); | ||||
|     }) | ||||
|   }); | ||||
|     }); | ||||
|  | ||||
|     $('#send-test-notification').click(function (e) { | ||||
|         e.preventDefault(); | ||||
|  | ||||
|         data = { | ||||
|             notification_body: $('#notification_body').val(), | ||||
|             notification_format: $('#notification_format').val(), | ||||
|             notification_title: $('#notification_title').val(), | ||||
|             notification_urls: $('.notification-urls').val(), | ||||
|             tags: $('#tags').val(), | ||||
|             window_url: window.location.href, | ||||
|         } | ||||
|  | ||||
|         $('.notifications-wrapper .spinner').fadeIn(); | ||||
|         $('#notification-test-log').show(); | ||||
|         $.ajax({ | ||||
|             type: "POST", | ||||
|             url: notification_base_url, | ||||
|             data: data, | ||||
|             statusCode: { | ||||
|                 400: function (data) { | ||||
|                     $("#notification-test-log>span").text(data.responseText); | ||||
|                 }, | ||||
|             } | ||||
|         }).done(function (data) { | ||||
|             $("#notification-test-log>span").text(data); | ||||
|         }).fail(function (jqXHR, textStatus, errorThrown) { | ||||
|             // Handle connection refused or other errors | ||||
|             if (textStatus === "error" && errorThrown === "") { | ||||
|                 console.error("Connection refused or server unreachable"); | ||||
|                 $("#notification-test-log>span").text("Error: Connection refused or server is unreachable."); | ||||
|             } else { | ||||
|                 console.error("Error:", textStatus, errorThrown); | ||||
|                 $("#notification-test-log>span").text("An error occurred: " + textStatus); | ||||
|             } | ||||
|         }).always(function () { | ||||
|             $('.notifications-wrapper .spinner').hide(); | ||||
|         }) | ||||
|     }); | ||||
| }); | ||||
|  | ||||
|   | ||||
| @@ -1,64 +1,106 @@ | ||||
| (function($) { | ||||
| (function ($) { | ||||
|     /** | ||||
|      * debounce | ||||
|      * @param {integer} milliseconds This param indicates the number of milliseconds | ||||
|      *     to wait after the last call before calling the original function. | ||||
|      * @param {object} What "this" refers to in the returned function. | ||||
|      * @return {function} This returns a function that when called will wait the | ||||
|      *     indicated number of milliseconds after the last call before | ||||
|      *     calling the original function. | ||||
|      */ | ||||
|     Function.prototype.debounce = function (milliseconds, context) { | ||||
|         var baseFunction = this, | ||||
|             timer = null, | ||||
|             wait = milliseconds; | ||||
|  | ||||
| /* | ||||
|     $('#code-block').highlightLines([ | ||||
|       { | ||||
|         'color': '#dd0000', | ||||
|         'lines': [10, 12] | ||||
|       }, | ||||
|       { | ||||
|         'color': '#ee0000', | ||||
|         'lines': [15, 18] | ||||
|       } | ||||
|     ]); | ||||
|   }); | ||||
| */ | ||||
|         return function () { | ||||
|             var self = context || this, | ||||
|                 args = arguments; | ||||
|  | ||||
|   $.fn.highlightLines = function(configurations) { | ||||
|     return this.each(function() { | ||||
|       const $pre = $(this); | ||||
|       const textContent = $pre.text(); | ||||
|       const lines = textContent.split(/\r?\n/); // Handles both \n and \r\n line endings | ||||
|             function complete() { | ||||
|                 baseFunction.apply(self, args); | ||||
|                 timer = null; | ||||
|             } | ||||
|  | ||||
|       // Build a map of line numbers to styles | ||||
|       const lineStyles = {}; | ||||
|             if (timer) { | ||||
|                 clearTimeout(timer); | ||||
|             } | ||||
|  | ||||
|       configurations.forEach(config => { | ||||
|         const { color, lines: lineNumbers } = config; | ||||
|         lineNumbers.forEach(lineNumber => { | ||||
|           lineStyles[lineNumber] = color; | ||||
|             timer = setTimeout(complete, wait); | ||||
|         }; | ||||
|     }; | ||||
|  | ||||
|     /** | ||||
|      * throttle | ||||
|      * @param {integer} milliseconds This param indicates the number of milliseconds | ||||
|      *     to wait between calls before calling the original function. | ||||
|      * @param {object} What "this" refers to in the returned function. | ||||
|      * @return {function} This returns a function that when called will wait the | ||||
|      *     indicated number of milliseconds between calls before | ||||
|      *     calling the original function. | ||||
|      */ | ||||
|     Function.prototype.throttle = function (milliseconds, context) { | ||||
|         var baseFunction = this, | ||||
|             lastEventTimestamp = null, | ||||
|             limit = milliseconds; | ||||
|  | ||||
|         return function () { | ||||
|             var self = context || this, | ||||
|                 args = arguments, | ||||
|                 now = Date.now(); | ||||
|  | ||||
|             if (!lastEventTimestamp || now - lastEventTimestamp >= limit) { | ||||
|                 lastEventTimestamp = now; | ||||
|                 baseFunction.apply(self, args); | ||||
|             } | ||||
|         }; | ||||
|     }; | ||||
|  | ||||
|     $.fn.highlightLines = function (configurations) { | ||||
|         return this.each(function () { | ||||
|             const $pre = $(this); | ||||
|             const textContent = $pre.text(); | ||||
|             const lines = textContent.split(/\r?\n/); // Handles both \n and \r\n line endings | ||||
|  | ||||
|             // Build a map of line numbers to styles | ||||
|             const lineStyles = {}; | ||||
|  | ||||
|             configurations.forEach(config => { | ||||
|                 const {color, lines: lineNumbers} = config; | ||||
|                 lineNumbers.forEach(lineNumber => { | ||||
|                     lineStyles[lineNumber] = color; | ||||
|                 }); | ||||
|             }); | ||||
|  | ||||
|             // Function to escape HTML characters | ||||
|             function escapeHtml(text) { | ||||
|                 return text.replace(/[&<>"'`=\/]/g, function (s) { | ||||
|                     return "&#" + s.charCodeAt(0) + ";"; | ||||
|                 }); | ||||
|             } | ||||
|  | ||||
|             // Process each line | ||||
|             const processedLines = lines.map((line, index) => { | ||||
|                 const lineNumber = index + 1; // Line numbers start at 1 | ||||
|                 const escapedLine = escapeHtml(line); | ||||
|                 const color = lineStyles[lineNumber]; | ||||
|  | ||||
|                 if (color) { | ||||
|                     // Wrap the line in a span with inline style | ||||
|                     return `<span style="background-color: ${color}">${escapedLine}</span>`; | ||||
|                 } else { | ||||
|                     return escapedLine; | ||||
|                 } | ||||
|             }); | ||||
|  | ||||
|             // Join the lines back together | ||||
|             const newContent = processedLines.join('\n'); | ||||
|  | ||||
|             // Set the new content as HTML | ||||
|             $pre.html(newContent); | ||||
|         }); | ||||
|       }); | ||||
|  | ||||
|       // Function to escape HTML characters | ||||
|       function escapeHtml(text) { | ||||
|         return text.replace(/[&<>"'`=\/]/g, function(s) { | ||||
|           return "&#" + s.charCodeAt(0) + ";"; | ||||
|         }); | ||||
|       } | ||||
|  | ||||
|       // Process each line | ||||
|       const processedLines = lines.map((line, index) => { | ||||
|         const lineNumber = index + 1; // Line numbers start at 1 | ||||
|         const escapedLine = escapeHtml(line); | ||||
|         const color = lineStyles[lineNumber]; | ||||
|  | ||||
|         if (color) { | ||||
|           // Wrap the line in a span with inline style | ||||
|           return `<span style="background-color: ${color}">${escapedLine}</span>`; | ||||
|         } else { | ||||
|           return escapedLine; | ||||
|         } | ||||
|       }); | ||||
|  | ||||
|       // Join the lines back together | ||||
|       const newContent = processedLines.join('\n'); | ||||
|  | ||||
|       // Set the new content as HTML | ||||
|       $pre.html(newContent); | ||||
|     }); | ||||
|   }; | ||||
|    $.fn.miniTabs = function(tabsConfig, options) { | ||||
|     }; | ||||
|     $.fn.miniTabs = function (tabsConfig, options) { | ||||
|         const settings = { | ||||
|             tabClass: 'minitab', | ||||
|             tabsContainerClass: 'minitabs', | ||||
| @@ -66,10 +108,10 @@ | ||||
|             ...(options || {}) | ||||
|         }; | ||||
|  | ||||
|         return this.each(function() { | ||||
|         return this.each(function () { | ||||
|             const $wrapper = $(this); | ||||
|             const $contents = $wrapper.find('div[id]').hide(); | ||||
|             const $tabsContainer = $('<div>', { class: settings.tabsContainerClass }).prependTo($wrapper); | ||||
|             const $tabsContainer = $('<div>', {class: settings.tabsContainerClass}).prependTo($wrapper); | ||||
|  | ||||
|             // Generate tabs | ||||
|             Object.entries(tabsConfig).forEach(([tabTitle, contentSelector], index) => { | ||||
| @@ -84,7 +126,7 @@ | ||||
|             }); | ||||
|  | ||||
|             // Tab click event | ||||
|             $tabsContainer.on('click', `.${settings.tabClass}`, function(e) { | ||||
|             $tabsContainer.on('click', `.${settings.tabClass}`, function (e) { | ||||
|                 e.preventDefault(); | ||||
|                 const $tab = $(this); | ||||
|                 const target = $tab.data('target'); | ||||
| @@ -103,7 +145,7 @@ | ||||
|     // Object to store ongoing requests by namespace | ||||
|     const requests = {}; | ||||
|  | ||||
|     $.abortiveSingularAjax = function(options) { | ||||
|     $.abortiveSingularAjax = function (options) { | ||||
|         const namespace = options.namespace || 'default'; | ||||
|  | ||||
|         // Abort the current request in this namespace if it's still ongoing | ||||
| @@ -117,4 +159,38 @@ | ||||
|         // Return the current request in case it's needed | ||||
|         return requests[namespace]; | ||||
|     }; | ||||
| })(jQuery); | ||||
| })(jQuery); | ||||
|  | ||||
|  | ||||
|  | ||||
| function toggleOpacity(checkboxSelector, fieldSelector, inverted) { | ||||
|     const checkbox = document.querySelector(checkboxSelector); | ||||
|     const fields = document.querySelectorAll(fieldSelector); | ||||
|  | ||||
|     function updateOpacity() { | ||||
|         const opacityValue = !checkbox.checked ? (inverted ? 0.6 : 1) : (inverted ? 1 : 0.6); | ||||
|         fields.forEach(field => { | ||||
|             field.style.opacity = opacityValue; | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     // Initial setup | ||||
|     updateOpacity(); | ||||
|     checkbox.addEventListener('change', updateOpacity); | ||||
| } | ||||
|  | ||||
| function toggleVisibility(checkboxSelector, fieldSelector, inverted) { | ||||
|     const checkbox = document.querySelector(checkboxSelector); | ||||
|     const fields = document.querySelectorAll(fieldSelector); | ||||
|  | ||||
|     function updateOpacity() { | ||||
|         const opacityValue = !checkbox.checked ? (inverted ? 'none' : 'block') : (inverted ? 'block' : 'none'); | ||||
|         fields.forEach(field => { | ||||
|             field.style.display = opacityValue; | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     // Initial setup | ||||
|     updateOpacity(); | ||||
|     checkbox.addEventListener('change', updateOpacity); | ||||
| } | ||||
|   | ||||
| @@ -1,14 +1,14 @@ | ||||
| $(function () { | ||||
|     /* add container before each proxy location to show status */ | ||||
|  | ||||
|     var option_li = $('.fetch-backend-proxy li').filter(function() { | ||||
|         return $("input",this)[0].value.length >0; | ||||
|     }); | ||||
|  | ||||
|     //var option_li = $('.fetch-backend-proxy li'); | ||||
|     var isActive = false; | ||||
|     $(option_li).prepend('<div class="proxy-status"></div>'); | ||||
|     $(option_li).append('<div class="proxy-timing"></div><div class="proxy-check-details"></div>'); | ||||
|  | ||||
|     function setup_html_widget() { | ||||
|         var option_li = $('.fetch-backend-proxy li').filter(function () { | ||||
|             return $("input", this)[0].value.length > 0; | ||||
|         }); | ||||
|         $(option_li).prepend('<div class="proxy-status"></div>'); | ||||
|         $(option_li).append('<div class="proxy-timing"></div><div class="proxy-check-details"></div>'); | ||||
|     } | ||||
|  | ||||
|     function set_proxy_check_status(proxy_key, state) { | ||||
|         // select input by value name | ||||
| @@ -59,8 +59,14 @@ $(function () { | ||||
|     } | ||||
|  | ||||
|     $('#check-all-proxies').click(function (e) { | ||||
|  | ||||
|         e.preventDefault() | ||||
|         $('body').addClass('proxy-check-active'); | ||||
|  | ||||
|         if (!$('body').hasClass('proxy-check-active')) { | ||||
|             setup_html_widget(); | ||||
|             $('body').addClass('proxy-check-active'); | ||||
|         } | ||||
|  | ||||
|         $('.proxy-check-details').html(''); | ||||
|         $('.proxy-status').html('<span class="spinner"></span>').fadeIn(); | ||||
|         $('.proxy-timing').html(''); | ||||
|   | ||||
							
								
								
									
										109
									
								
								changedetectionio/static/js/scheduler.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										109
									
								
								changedetectionio/static/js/scheduler.js
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,109 @@ | ||||
| function getTimeInTimezone(timezone) { | ||||
|     const now = new Date(); | ||||
|     const options = { | ||||
|         timeZone: timezone, | ||||
|         weekday: 'long', | ||||
|         year: 'numeric', | ||||
|         hour12: false, | ||||
|         month: '2-digit', | ||||
|         day: '2-digit', | ||||
|         hour: '2-digit', | ||||
|         minute: '2-digit', | ||||
|         second: '2-digit', | ||||
|     }; | ||||
|  | ||||
|     const formatter = new Intl.DateTimeFormat('en-US', options); | ||||
|     return formatter.format(now); | ||||
| } | ||||
|  | ||||
| $(document).ready(function () { | ||||
|  | ||||
|     let exceedsLimit = false; | ||||
|     const warning_text = $("#timespan-warning") | ||||
|     const timezone_text_widget = $("input[id*='time_schedule_limit-timezone']") | ||||
|  | ||||
|     toggleVisibility('#time_schedule_limit-enabled, #requests-time_schedule_limit-enabled', '#schedule-day-limits-wrapper', true) | ||||
|  | ||||
|     setInterval(() => { | ||||
|         let success = true; | ||||
|         try { | ||||
|             // Show the current local time according to either placeholder or entered TZ name | ||||
|             if (timezone_text_widget.val().length) { | ||||
|                 $('#local-time-in-tz').text(getTimeInTimezone(timezone_text_widget.val())); | ||||
|             } else { | ||||
|                 // So maybe use what is in the placeholder (which will be the default settings) | ||||
|                 $('#local-time-in-tz').text(getTimeInTimezone(timezone_text_widget.attr('placeholder'))); | ||||
|             } | ||||
|         } catch (error) { | ||||
|             success = false; | ||||
|             $('#local-time-in-tz').text(""); | ||||
|             console.error(timezone_text_widget.val()) | ||||
|         } | ||||
|  | ||||
|         $(timezone_text_widget).toggleClass('error', !success); | ||||
|  | ||||
|     }, 500); | ||||
|  | ||||
|     $('#schedule-day-limits-wrapper').on('change click blur', 'input, checkbox, select', function() { | ||||
|  | ||||
|         let allOk = true; | ||||
|  | ||||
|         // Controls setting the warning that the time could overlap into the next day | ||||
|         $("li.day-schedule").each(function () { | ||||
|             const $schedule = $(this); | ||||
|             const $checkbox = $schedule.find("input[type='checkbox']"); | ||||
|  | ||||
|             if ($checkbox.is(":checked")) { | ||||
|                 const timeValue = $schedule.find("input[type='time']").val(); | ||||
|                 const durationHours = parseInt($schedule.find("select[name*='-duration-hours']").val(), 10) || 0; | ||||
|                 const durationMinutes = parseInt($schedule.find("select[name*='-duration-minutes']").val(), 10) || 0; | ||||
|  | ||||
|                 if (timeValue) { | ||||
|                     const [startHours, startMinutes] = timeValue.split(":").map(Number); | ||||
|                     const totalMinutes = (startHours * 60 + startMinutes) + (durationHours * 60 + durationMinutes); | ||||
|  | ||||
|                     exceedsLimit = totalMinutes > 1440 | ||||
|                     if (exceedsLimit) { | ||||
|                         allOk = false | ||||
|                     } | ||||
|                     // Set the row/day-of-week highlight | ||||
|                     $schedule.toggleClass("warning", exceedsLimit); | ||||
|                 } | ||||
|             } else { | ||||
|                 $schedule.toggleClass("warning", false); | ||||
|             } | ||||
|         }); | ||||
|  | ||||
|         warning_text.toggle(!allOk) | ||||
|     }); | ||||
|  | ||||
|     $('table[id*="time_schedule_limit-saturday"], table[id*="time_schedule_limit-sunday"]').addClass("weekend-day") | ||||
|  | ||||
|     // Presets [weekend] [business hours] etc | ||||
|     $(document).on('click', '[data-template].set-schedule', function () { | ||||
|         // Get the value of the 'data-template' attribute | ||||
|         switch ($(this).attr('data-template')) { | ||||
|             case 'business-hours': | ||||
|                 $('.day-schedule table:not(.weekend-day) input[type="time"]').val('09:00') | ||||
|                 $('.day-schedule table:not(.weekend-day) select[id*="-duration-hours"]').val('8'); | ||||
|                 $('.day-schedule table:not(.weekend-day) select[id*="-duration-minutes"]').val('0'); | ||||
|                 $('.day-schedule input[id*="-enabled"]').prop('checked', true); | ||||
|                 $('.day-schedule .weekend-day input[id*="-enabled"]').prop('checked', false); | ||||
|                 break; | ||||
|             case 'weekend': | ||||
|                 $('.day-schedule .weekend-day input[type="time"][id$="start-time"]').val('00:00') | ||||
|                 $('.day-schedule .weekend-day select[id*="-duration-hours"]').val('24'); | ||||
|                 $('.day-schedule .weekend-day select[id*="-duration-minutes"]').val('0'); | ||||
|                 $('.day-schedule input[id*="-enabled"]').prop('checked', false); | ||||
|                 $('.day-schedule .weekend-day input[id*="-enabled"]').prop('checked', true); | ||||
|                 break; | ||||
|             case 'reset': | ||||
|  | ||||
|                 $('.day-schedule input[type="time"]').val('00:00') | ||||
|                 $('.day-schedule select[id*="-duration-hours"]').val('24'); | ||||
|                 $('.day-schedule select[id*="-duration-minutes"]').val('0'); | ||||
|                 $('.day-schedule input[id*="-enabled"]').prop('checked', true); | ||||
|                 break; | ||||
|         } | ||||
|     }); | ||||
| }); | ||||
| @@ -1,49 +1,66 @@ | ||||
| // Rewrite this is a plugin.. is all this JS really 'worth it?' | ||||
| (function ($) { | ||||
|     $.fn.hashTabs = function (options) { | ||||
|         var settings = $.extend({ | ||||
|             tabContainer: ".tabs ul", | ||||
|             tabSelector: "li a", | ||||
|             tabContent: ".tab-pane-inner", | ||||
|             activeClass: "active", | ||||
|             errorClass: ".messages .error", | ||||
|             bodyClassToggle: "full-width" | ||||
|         }, options); | ||||
|  | ||||
| window.addEventListener('hashchange', function () { | ||||
|     var tabs = document.getElementsByClassName('active'); | ||||
|     while (tabs[0]) { | ||||
|         tabs[0].classList.remove('active'); | ||||
|         document.body.classList.remove('full-width'); | ||||
|     } | ||||
|     set_active_tab(); | ||||
| }, false); | ||||
|         var $tabs = $(settings.tabContainer).find(settings.tabSelector); | ||||
|  | ||||
| var has_errors = document.querySelectorAll(".messages .error"); | ||||
| if (!has_errors.length) { | ||||
|     if (document.location.hash == "") { | ||||
|         location.replace(document.querySelector(".tabs ul li:first-child a").hash); | ||||
|     } else { | ||||
|         set_active_tab(); | ||||
|     } | ||||
| } else { | ||||
|     focus_error_tab(); | ||||
| } | ||||
|         function setActiveTab() { | ||||
|             var hash = window.location.hash; | ||||
|             var $activeTab = $tabs.filter("[href='" + hash + "']"); | ||||
|  | ||||
| function set_active_tab() { | ||||
|     document.body.classList.remove('full-width'); | ||||
|     var tab = document.querySelectorAll("a[href='" + location.hash + "']"); | ||||
|     if (tab.length) { | ||||
|         tab[0].parentElement.className = "active"; | ||||
|     } | ||||
|     // hash could move the page down | ||||
|     window.scrollTo(0, 0); | ||||
| } | ||||
|             // Remove active class from all tabs | ||||
|             $(settings.tabContainer).find("li").removeClass(settings.activeClass); | ||||
|  | ||||
| function focus_error_tab() { | ||||
|     // time to use jquery or vuejs really, | ||||
|     // activate the tab with the error | ||||
|     var tabs = document.querySelectorAll('.tabs li a'), i; | ||||
|     for (i = 0; i < tabs.length; ++i) { | ||||
|         var tab_name = tabs[i].hash.replace('#', ''); | ||||
|         var pane_errors = document.querySelectorAll('#' + tab_name + ' .error') | ||||
|         if (pane_errors.length) { | ||||
|             document.location.hash = '#' + tab_name; | ||||
|             return true; | ||||
|             // Add active class to selected tab | ||||
|             if ($activeTab.length) { | ||||
|                 $activeTab.parent().addClass(settings.activeClass); | ||||
|             } | ||||
|  | ||||
|             // Show the correct content | ||||
|             $(settings.tabContent).hide(); | ||||
|             if (hash) { | ||||
|                 $(hash).show(); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|     return false; | ||||
| } | ||||
|  | ||||
|  | ||||
|         function focusErrorTab() { | ||||
|             $tabs.each(function () { | ||||
|                 var tabName = this.hash.replace("#", ""); | ||||
|                 if ($("#" + tabName).find(settings.errorClass).length) { | ||||
|                     window.location.hash = "#" + tabName; | ||||
|                     return false; // Stop loop on first error tab | ||||
|                 } | ||||
|             }); | ||||
|         } | ||||
|  | ||||
|         function initializeTabs() { | ||||
|             if ($(settings.errorClass).length) { | ||||
|                 focusErrorTab(); | ||||
|             } else if (!window.location.hash) { | ||||
|                 window.location.replace($tabs.first().attr("href")); | ||||
|             } else { | ||||
|                 setActiveTab(); | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         // Listen for hash changes | ||||
|         $(window).on("hashchange", setActiveTab); | ||||
|  | ||||
|         // Initialize on page load | ||||
|         initializeTabs(); | ||||
|  | ||||
|         return this; // Enable jQuery chaining | ||||
|     }; | ||||
| })(jQuery); | ||||
|  | ||||
|  | ||||
| $(document).ready(function () { | ||||
|     $(".tabs").hashTabs(); | ||||
| }); | ||||
| @@ -49,4 +49,9 @@ $(document).ready(function () { | ||||
|         $("#overlay").toggleClass('visible'); | ||||
|         heartpath.style.fill = document.getElementById("overlay").classList.contains("visible") ? '#ff0000' : 'var(--color-background)'; | ||||
|     }); | ||||
|  | ||||
|     setInterval(function () { | ||||
|         $('body').toggleClass('spinner-active', $.active > 0); | ||||
|     }, 2000); | ||||
|  | ||||
| }); | ||||
|   | ||||
| @@ -132,6 +132,7 @@ $(document).ready(() => { | ||||
|         }).done((data) => { | ||||
|             $fetchingUpdateNoticeElem.html("Rendering.."); | ||||
|             selectorData = data; | ||||
|  | ||||
|             sortScrapedElementsBySize(); | ||||
|             console.log(`Reported browser width from backend: ${data['browser_width']}`); | ||||
|  | ||||
|   | ||||
| @@ -1,17 +1,3 @@ | ||||
| function toggleOpacity(checkboxSelector, fieldSelector, inverted) { | ||||
|     const checkbox = document.querySelector(checkboxSelector); | ||||
|     const fields = document.querySelectorAll(fieldSelector); | ||||
|     function updateOpacity() { | ||||
|         const opacityValue = !checkbox.checked ? (inverted ? 0.6 : 1) : (inverted ? 1 : 0.6); | ||||
|         fields.forEach(field => { | ||||
|             field.style.opacity = opacityValue; | ||||
|         }); | ||||
|     } | ||||
|     // Initial setup | ||||
|     updateOpacity(); | ||||
|     checkbox.addEventListener('change', updateOpacity); | ||||
| } | ||||
|  | ||||
|  | ||||
| function request_textpreview_update() { | ||||
|     if (!$('body').hasClass('preview-text-enabled')) { | ||||
| @@ -26,25 +12,28 @@ function request_textpreview_update() { | ||||
|         data[name] = $element.is(':checkbox') ? ($element.is(':checked') ? $element.val() : false) : $element.val(); | ||||
|     }); | ||||
|  | ||||
|     $('body').toggleClass('spinner-active', 1); | ||||
|  | ||||
|     $.abortiveSingularAjax({ | ||||
|         type: "POST", | ||||
|         url: preview_text_edit_filters_url, | ||||
|         data: data, | ||||
|         namespace: 'watchEdit' | ||||
|     }).done(function (data) { | ||||
|         console.debug(data['duration']) | ||||
|         $('#filters-and-triggers #text-preview-before-inner').text(data['before_filter']); | ||||
|  | ||||
|         $('#filters-and-triggers #text-preview-inner') | ||||
|             .text(data['after_filter']) | ||||
|             .highlightLines([ | ||||
|                 { | ||||
|                     'color': '#ee0000', | ||||
|                     'lines': data['trigger_line_numbers'] | ||||
|                 }, | ||||
|                 { | ||||
|                     'color': '#757575', | ||||
|                     'lines': data['ignore_line_numbers'] | ||||
|                 } | ||||
|             ]); | ||||
|  | ||||
|  | ||||
|  | ||||
|             ]) | ||||
|     }).fail(function (error) { | ||||
|         if (error.statusText === 'abort') { | ||||
|             console.log('Request was aborted due to a new request being fired.'); | ||||
| @@ -54,7 +43,9 @@ function request_textpreview_update() { | ||||
|     }) | ||||
| } | ||||
|  | ||||
|  | ||||
| $(document).ready(function () { | ||||
|  | ||||
|     $('#notification-setting-reset-to-default').click(function (e) { | ||||
|         $('#notification_title').val(''); | ||||
|         $('#notification_body').val(''); | ||||
| @@ -67,24 +58,20 @@ $(document).ready(function () { | ||||
|         $('#notification-tokens-info').toggle(); | ||||
|     }); | ||||
|  | ||||
|     toggleOpacity('#time_between_check_use_default', '#time_between_check', false); | ||||
|     toggleOpacity('#time_between_check_use_default', '#time-check-widget-wrapper, #time-between-check-schedule', false); | ||||
|  | ||||
|  | ||||
|     const vh = Math.max(document.documentElement.clientHeight || 0, window.innerHeight || 0); | ||||
|     $("#text-preview-inner").css('max-height', (vh-300)+"px"); | ||||
|     $("#text-preview-before-inner").css('max-height', (vh-300)+"px"); | ||||
|  | ||||
|     // Realtime preview of 'Filters & Text' setup | ||||
|     var debounced_request_textpreview_update = request_textpreview_update.debounce(100); | ||||
|     $("#text-preview-inner").css('max-height', (vh - 300) + "px"); | ||||
|     $("#text-preview-before-inner").css('max-height', (vh - 300) + "px"); | ||||
|  | ||||
|     $("#activate-text-preview").click(function (e) { | ||||
|         $('body').toggleClass('preview-text-enabled') | ||||
|         request_textpreview_update(); | ||||
|  | ||||
|         const method = $('body').hasClass('preview-text-enabled') ? 'on' : 'off'; | ||||
|         $("#text-preview-refresh")[method]('click', debounced_request_textpreview_update); | ||||
|         $('textarea:visible')[method]('keyup blur', debounced_request_textpreview_update); | ||||
|         $('input:visible')[method]('keyup blur change', debounced_request_textpreview_update); | ||||
|         $("#filters-and-triggers-tab")[method]('click', debounced_request_textpreview_update); | ||||
|         $('#filters-and-triggers textarea')[method]('blur', request_textpreview_update.throttle(1000)); | ||||
|         $('#filters-and-triggers input')[method]('change', request_textpreview_update.throttle(1000)); | ||||
|         $("#filters-and-triggers-tab")[method]('click', request_textpreview_update.throttle(1000)); | ||||
|     }); | ||||
|     $('.minitabs-wrapper').miniTabs({ | ||||
|         "Content after filters": "#text-preview-inner", | ||||
|   | ||||
| @@ -153,7 +153,8 @@ html[data-darkmode="true"] { | ||||
|     border: 1px solid transparent; | ||||
|     vertical-align: top; | ||||
|     font: 1em monospace; | ||||
|     text-align: left; } | ||||
|     text-align: left; | ||||
|     overflow: clip; } | ||||
|   #diff-ui pre { | ||||
|     white-space: pre-wrap; } | ||||
|  | ||||
| @@ -172,7 +173,9 @@ ins { | ||||
|   text-decoration: none; } | ||||
|  | ||||
| #result { | ||||
|   white-space: pre-wrap; } | ||||
|   white-space: pre-wrap; | ||||
|   word-break: break-word; | ||||
|   overflow-wrap: break-word; } | ||||
|  | ||||
| #settings { | ||||
|   background: rgba(0, 0, 0, 0.05); | ||||
| @@ -231,3 +234,12 @@ td#diff-col div { | ||||
|   border-radius: 5px; | ||||
|   background: var(--color-background); | ||||
|   box-shadow: 1px 1px 4px var(--color-shadow-jump); } | ||||
|  | ||||
| .pure-form button.reset-margin { | ||||
|   margin: 0px; } | ||||
|  | ||||
| .diff-fieldset { | ||||
|   display: flex; | ||||
|   align-items: center; | ||||
|   gap: 4px; | ||||
|   flex-wrap: wrap; } | ||||
|   | ||||
| @@ -24,6 +24,7 @@ | ||||
|     vertical-align: top; | ||||
|     font: 1em monospace; | ||||
|     text-align: left; | ||||
|     overflow: clip; // clip overflowing contents to cell boundariess | ||||
|   } | ||||
|  | ||||
|   pre { | ||||
| @@ -50,6 +51,8 @@ ins { | ||||
|  | ||||
| #result { | ||||
|   white-space: pre-wrap; | ||||
|   word-break: break-word; | ||||
|   overflow-wrap: break-word; | ||||
|  | ||||
|   .change { | ||||
|     span {} | ||||
| @@ -134,3 +137,15 @@ td#diff-col div { | ||||
|   background: var(--color-background); | ||||
|   box-shadow: 1px 1px 4px var(--color-shadow-jump); | ||||
| } | ||||
|  | ||||
| // resets button margin to 0px | ||||
| .pure-form button.reset-margin { | ||||
|   margin: 0px; | ||||
| } | ||||
|  | ||||
| .diff-fieldset { | ||||
|   display: flex; | ||||
|   align-items: center; | ||||
|   gap: 4px; | ||||
|   flex-wrap: wrap; | ||||
| } | ||||
| @@ -11,7 +11,22 @@ ul#requests-extra_browsers { | ||||
|   /* each proxy entry is a `table` */ | ||||
|   table { | ||||
|     tr { | ||||
|       display: inline; | ||||
|       display: table-row; // default display for small screens | ||||
|       input[type=text] { | ||||
|         width: 100%; | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|    | ||||
|   // apply inline display for larger screens | ||||
|   @media only screen and (min-width: 1280px) { | ||||
|     table { | ||||
|       tr { | ||||
|         display: inline; | ||||
|         input[type=text] { | ||||
|           width: 100%; | ||||
|         } | ||||
|       } | ||||
|     } | ||||
|   } | ||||
| } | ||||
|   | ||||
| @@ -11,7 +11,19 @@ ul#requests-extra_proxies { | ||||
|   /* each proxy entry is a `table` */ | ||||
|   table { | ||||
|     tr { | ||||
|       display: inline; | ||||
|       display: table-row; // default display for small screens | ||||
|       input[type=text] { | ||||
|         width: 100%; | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|    | ||||
|   // apply inline display for large screens | ||||
|   @media only screen and (min-width: 1024px) { | ||||
|     table { | ||||
|       tr { | ||||
|         display: inline; | ||||
|       } | ||||
|     } | ||||
|   } | ||||
| } | ||||
| @@ -25,15 +37,19 @@ ul#requests-extra_proxies { | ||||
|  | ||||
| body.proxy-check-active { | ||||
|   #request { | ||||
|     // Padding set by flex layout | ||||
|     /* | ||||
|     .proxy-status { | ||||
|       width: 2em; | ||||
|     } | ||||
|     */ | ||||
|  | ||||
|     .proxy-check-details { | ||||
|       font-size: 80%; | ||||
|       color: #555; | ||||
|       display: block; | ||||
|       padding-left: 4em; | ||||
|       padding-left: 2em; | ||||
|       max-width: 500px; | ||||
|     } | ||||
|  | ||||
|     .proxy-timing { | ||||
|   | ||||
| @@ -7,6 +7,16 @@ | ||||
|     border-top: none; | ||||
|   } | ||||
|  | ||||
|   .minitabs-content { | ||||
|     width: 100%; | ||||
|     display: flex; | ||||
|     > div { | ||||
|       flex: 1 1 auto; | ||||
|       min-width: 0; | ||||
|       overflow: scroll; | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   .minitabs { | ||||
|     display: flex; | ||||
|     border-bottom: 1px solid #ccc; | ||||
|   | ||||
| @@ -42,9 +42,8 @@ body.preview-text-enabled { | ||||
|     color: var(--color-text-input); | ||||
|     font-family: "Courier New", Courier, monospace; /* Sets the font to a monospace type */ | ||||
|     font-size: 70%; | ||||
|     overflow-x: scroll; | ||||
|     word-break: break-word; | ||||
|     white-space: pre-wrap; /* Preserves whitespace and line breaks like <pre> */ | ||||
|     overflow-wrap: break-word; /* Allows long words to break and wrap to the next line */ | ||||
|   } | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -106,10 +106,34 @@ button.toggle-button { | ||||
|   padding: 5px; | ||||
|   display: flex; | ||||
|   justify-content: space-between; | ||||
|   border-bottom: 2px solid var(--color-menu-accent); | ||||
|   align-items: center; | ||||
| } | ||||
|  | ||||
| #pure-menu-horizontal-spinner { | ||||
|   height: 3px; | ||||
|   background: linear-gradient(-75deg, #ff6000, #ff8f00, #ffdd00, #ed0000); | ||||
|   background-size: 400% 400%; | ||||
|   width: 100%; | ||||
|   animation: gradient 200s ease infinite; | ||||
| } | ||||
|  | ||||
| body.spinner-active { | ||||
|   #pure-menu-horizontal-spinner { | ||||
|     animation: gradient 1s ease infinite; | ||||
|   } | ||||
| } | ||||
|  | ||||
| @keyframes gradient { | ||||
| 	0% { | ||||
| 		background-position: 0% 50%; | ||||
| 	} | ||||
| 	50% { | ||||
| 		background-position: 100% 50%; | ||||
| 	} | ||||
| 	100% { | ||||
| 		background-position: 0% 50%; | ||||
| 	} | ||||
| } | ||||
| .pure-menu-heading { | ||||
|   color: var(--color-text-menu-heading); | ||||
| } | ||||
| @@ -123,8 +147,14 @@ button.toggle-button { | ||||
|   } | ||||
| } | ||||
|  | ||||
|  | ||||
| .tab-pane-inner { | ||||
|   // .tab-pane-inner will have the #id that the tab button jumps/anchors to | ||||
|   scroll-margin-top: 200px; | ||||
| } | ||||
|  | ||||
| section.content { | ||||
|   padding-top: 5em; | ||||
|   padding-top: 100px; | ||||
|   padding-bottom: 1em; | ||||
|   flex-direction: column; | ||||
|   display: flex; | ||||
| @@ -350,7 +380,15 @@ a.pure-button-selected { | ||||
| } | ||||
|  | ||||
| .notifications-wrapper { | ||||
|   padding: 0.5rem 0 1rem 0; | ||||
|   padding-top: 0.5rem; | ||||
|   #notification-test-log { | ||||
|     padding-top: 1rem; | ||||
|     white-space: pre-wrap; | ||||
|     word-break: break-word; | ||||
|     overflow-wrap: break-word; | ||||
|     max-width: 100%; | ||||
|     box-sizing: border-box; | ||||
|   } | ||||
| } | ||||
|  | ||||
| label { | ||||
| @@ -907,14 +945,7 @@ $form-edge-padding: 20px; | ||||
| } | ||||
|  | ||||
| .tab-pane-inner { | ||||
|   &:not(:target) { | ||||
|     display: none; | ||||
|   } | ||||
|  | ||||
|   &:target { | ||||
|     display: block; | ||||
|   } | ||||
|  | ||||
|   display: none; | ||||
|   // doesnt need padding because theres another row of buttons/activity | ||||
|   padding: 0px; | ||||
| } | ||||
|   | ||||
| @@ -112,26 +112,34 @@ ul#requests-extra_proxies { | ||||
|   ul#requests-extra_proxies li > label { | ||||
|     display: none; } | ||||
|   ul#requests-extra_proxies table tr { | ||||
|     display: inline; } | ||||
|     display: table-row; } | ||||
|     ul#requests-extra_proxies table tr input[type=text] { | ||||
|       width: 100%; } | ||||
|   @media only screen and (min-width: 1024px) { | ||||
|     ul#requests-extra_proxies table tr { | ||||
|       display: inline; } } | ||||
|  | ||||
| #request { | ||||
|   /* Auto proxy scan/checker */ } | ||||
|   #request label[for=proxy] { | ||||
|     display: inline-block; } | ||||
|  | ||||
| body.proxy-check-active #request .proxy-status { | ||||
|   width: 2em; } | ||||
|  | ||||
| body.proxy-check-active #request .proxy-check-details { | ||||
|   font-size: 80%; | ||||
|   color: #555; | ||||
|   display: block; | ||||
|   padding-left: 4em; } | ||||
|  | ||||
| body.proxy-check-active #request .proxy-timing { | ||||
|   font-size: 80%; | ||||
|   padding-left: 1rem; | ||||
|   color: var(--color-link); } | ||||
| body.proxy-check-active #request { | ||||
|   /* | ||||
|     .proxy-status { | ||||
|       width: 2em; | ||||
|     } | ||||
|     */ } | ||||
|   body.proxy-check-active #request .proxy-check-details { | ||||
|     font-size: 80%; | ||||
|     color: #555; | ||||
|     display: block; | ||||
|     padding-left: 2em; | ||||
|     max-width: 500px; } | ||||
|   body.proxy-check-active #request .proxy-timing { | ||||
|     font-size: 80%; | ||||
|     padding-left: 1rem; | ||||
|     color: var(--color-link); } | ||||
|  | ||||
| #recommended-proxy { | ||||
|   display: grid; | ||||
| @@ -158,7 +166,14 @@ ul#requests-extra_browsers { | ||||
|   ul#requests-extra_browsers li > label { | ||||
|     display: none; } | ||||
|   ul#requests-extra_browsers table tr { | ||||
|     display: inline; } | ||||
|     display: table-row; } | ||||
|     ul#requests-extra_browsers table tr input[type=text] { | ||||
|       width: 100%; } | ||||
|   @media only screen and (min-width: 1280px) { | ||||
|     ul#requests-extra_browsers table tr { | ||||
|       display: inline; } | ||||
|       ul#requests-extra_browsers table tr input[type=text] { | ||||
|         width: 100%; } } | ||||
|  | ||||
| #extra-browsers-setting { | ||||
|   border: 1px solid var(--color-grey-800); | ||||
| @@ -434,6 +449,13 @@ html[data-darkmode="true"] #toggle-light-mode .icon-dark { | ||||
|     padding: 20px; | ||||
|     border: 1px solid #ccc; | ||||
|     border-top: none; } | ||||
|   .minitabs-wrapper .minitabs-content { | ||||
|     width: 100%; | ||||
|     display: flex; } | ||||
|     .minitabs-wrapper .minitabs-content > div { | ||||
|       flex: 1 1 auto; | ||||
|       min-width: 0; | ||||
|       overflow: scroll; } | ||||
|   .minitabs-wrapper .minitabs { | ||||
|     display: flex; | ||||
|     border-bottom: 1px solid #ccc; } | ||||
| @@ -488,11 +510,9 @@ body.preview-text-enabled { | ||||
|     font-family: "Courier New", Courier, monospace; | ||||
|     /* Sets the font to a monospace type */ | ||||
|     font-size: 70%; | ||||
|     overflow-x: scroll; | ||||
|     word-break: break-word; | ||||
|     white-space: pre-wrap; | ||||
|     /* Preserves whitespace and line breaks like <pre> */ | ||||
|     overflow-wrap: break-word; | ||||
|     /* Allows long words to break and wrap to the next line */ } | ||||
|     /* Preserves whitespace and line breaks like <pre> */ } | ||||
|  | ||||
| #activate-text-preview { | ||||
|   right: 0; | ||||
| @@ -568,9 +588,26 @@ button.toggle-button { | ||||
|   padding: 5px; | ||||
|   display: flex; | ||||
|   justify-content: space-between; | ||||
|   border-bottom: 2px solid var(--color-menu-accent); | ||||
|   align-items: center; } | ||||
|  | ||||
| #pure-menu-horizontal-spinner { | ||||
|   height: 3px; | ||||
|   background: linear-gradient(-75deg, #ff6000, #ff8f00, #ffdd00, #ed0000); | ||||
|   background-size: 400% 400%; | ||||
|   width: 100%; | ||||
|   animation: gradient 200s ease infinite; } | ||||
|  | ||||
| body.spinner-active #pure-menu-horizontal-spinner { | ||||
|   animation: gradient 1s ease infinite; } | ||||
|  | ||||
| @keyframes gradient { | ||||
|   0% { | ||||
|     background-position: 0% 50%; } | ||||
|   50% { | ||||
|     background-position: 100% 50%; } | ||||
|   100% { | ||||
|     background-position: 0% 50%; } } | ||||
|  | ||||
| .pure-menu-heading { | ||||
|   color: var(--color-text-menu-heading); } | ||||
|  | ||||
| @@ -580,8 +617,11 @@ button.toggle-button { | ||||
|     background-color: var(--color-background-menu-link-hover); | ||||
|     color: var(--color-text-menu-link-hover); } | ||||
|  | ||||
| .tab-pane-inner { | ||||
|   scroll-margin-top: 200px; } | ||||
|  | ||||
| section.content { | ||||
|   padding-top: 5em; | ||||
|   padding-top: 100px; | ||||
|   padding-bottom: 1em; | ||||
|   flex-direction: column; | ||||
|   display: flex; | ||||
| @@ -740,7 +780,14 @@ a.pure-button-selected { | ||||
|   cursor: pointer; } | ||||
|  | ||||
| .notifications-wrapper { | ||||
|   padding: 0.5rem 0 1rem 0; } | ||||
|   padding-top: 0.5rem; } | ||||
|   .notifications-wrapper #notification-test-log { | ||||
|     padding-top: 1rem; | ||||
|     white-space: pre-wrap; | ||||
|     word-break: break-word; | ||||
|     overflow-wrap: break-word; | ||||
|     max-width: 100%; | ||||
|     box-sizing: border-box; } | ||||
|  | ||||
| label:hover { | ||||
|   cursor: pointer; } | ||||
| @@ -1112,11 +1159,8 @@ textarea::placeholder { | ||||
|   border-radius: 5px; } | ||||
|  | ||||
| .tab-pane-inner { | ||||
|   display: none; | ||||
|   padding: 0px; } | ||||
|   .tab-pane-inner:not(:target) { | ||||
|     display: none; } | ||||
|   .tab-pane-inner:target { | ||||
|     display: block; } | ||||
|  | ||||
| .beta-logo { | ||||
|   height: 50px; | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from flask import ( | ||||
|     flash | ||||
| ) | ||||
|  | ||||
| from .html_tools import TRANSLATE_WHITESPACE_TABLE | ||||
| from . model import App, Watch | ||||
| from copy import deepcopy, copy | ||||
| from os import path, unlink | ||||
| @@ -373,7 +374,7 @@ class ChangeDetectionStore: | ||||
|     def visualselector_data_is_ready(self, watch_uuid): | ||||
|         output_path = "{}/{}".format(self.datastore_path, watch_uuid) | ||||
|         screenshot_filename = "{}/last-screenshot.png".format(output_path) | ||||
|         elements_index_filename = "{}/elements.json".format(output_path) | ||||
|         elements_index_filename = "{}/elements.deflate".format(output_path) | ||||
|         if path.isfile(screenshot_filename) and  path.isfile(elements_index_filename) : | ||||
|             return True | ||||
|  | ||||
| @@ -750,17 +751,17 @@ class ChangeDetectionStore: | ||||
|     def update_5(self): | ||||
|         # If the watch notification body, title look the same as the global one, unset it, so the watch defaults back to using the main settings | ||||
|         # In other words - the watch notification_title and notification_body are not needed if they are the same as the default one | ||||
|         current_system_body = self.data['settings']['application']['notification_body'].translate(str.maketrans('', '', "\r\n ")) | ||||
|         current_system_title = self.data['settings']['application']['notification_body'].translate(str.maketrans('', '', "\r\n ")) | ||||
|         current_system_body = self.data['settings']['application']['notification_body'].translate(TRANSLATE_WHITESPACE_TABLE) | ||||
|         current_system_title = self.data['settings']['application']['notification_body'].translate(TRANSLATE_WHITESPACE_TABLE) | ||||
|         for uuid, watch in self.data['watching'].items(): | ||||
|             try: | ||||
|                 watch_body = watch.get('notification_body', '') | ||||
|                 if watch_body and watch_body.translate(str.maketrans('', '', "\r\n ")) == current_system_body: | ||||
|                 if watch_body and watch_body.translate(TRANSLATE_WHITESPACE_TABLE) == current_system_body: | ||||
|                     # Looks the same as the default one, so unset it | ||||
|                     watch['notification_body'] = None | ||||
|  | ||||
|                 watch_title = watch.get('notification_title', '') | ||||
|                 if watch_title and watch_title.translate(str.maketrans('', '', "\r\n ")) == current_system_title: | ||||
|                 if watch_title and watch_title.translate(TRANSLATE_WHITESPACE_TABLE) == current_system_title: | ||||
|                     # Looks the same as the default one, so unset it | ||||
|                     watch['notification_title'] = None | ||||
|             except Exception as e: | ||||
| @@ -908,3 +909,18 @@ class ChangeDetectionStore: | ||||
|             if self.data['watching'][uuid].get('in_stock_only'): | ||||
|                 del (self.data['watching'][uuid]['in_stock_only']) | ||||
|  | ||||
|     # Compress old elements.json to elements.deflate, saving disk, this compression is pretty fast. | ||||
|     def update_19(self): | ||||
|         import zlib | ||||
|  | ||||
|         for uuid, watch in self.data['watching'].items(): | ||||
|             json_path = os.path.join(self.datastore_path, uuid, "elements.json") | ||||
|             deflate_path = os.path.join(self.datastore_path, uuid, "elements.deflate") | ||||
|  | ||||
|             if os.path.exists(json_path): | ||||
|                 with open(json_path, "rb") as f_j: | ||||
|                     with open(deflate_path, "wb") as f_d: | ||||
|                         logger.debug(f"Compressing {str(json_path)} to {str(deflate_path)}..") | ||||
|                         f_d.write(zlib.compress(f_j.read())) | ||||
|                         os.unlink(json_path) | ||||
|  | ||||
|   | ||||
| @@ -24,11 +24,13 @@ | ||||
|                               </ul> | ||||
|                             </div> | ||||
|                             <div class="notifications-wrapper"> | ||||
|                               <a id="send-test-notification" class="pure-button button-secondary button-xsmall" >Send test notification</a> | ||||
|                               <a id="send-test-notification" class="pure-button button-secondary button-xsmall" >Send test notification</a> <div class="spinner"  style="display: none;"></div> | ||||
|                             {% if emailprefix %} | ||||
|                               <a id="add-email-helper" class="pure-button button-secondary button-xsmall" >Add email <img style="height: 1em; display: inline-block" src="{{url_for('static_content', group='images', filename='email.svg')}}" alt="Add an email address"> </a> | ||||
|                             {% endif %} | ||||
|                               <a href="{{url_for('notification_logs')}}" class="pure-button button-secondary button-xsmall" >Notification debug logs</a> | ||||
|                               <br> | ||||
|                                 <div id="notification-test-log" style="display: none;"><span class="pure-form-message-inline">Processing..</span></div> | ||||
|                             </div> | ||||
|                         </div> | ||||
|                         <div id="notification-customisation" class="pure-control-group"> | ||||
|   | ||||
| @@ -59,4 +59,100 @@ | ||||
|  | ||||
| {% macro render_button(field) %} | ||||
|   {{ field(**kwargs)|safe }} | ||||
| {% endmacro %} | ||||
|  | ||||
| {% macro render_time_schedule_form(form, available_timezones, timezone_default_config) %} | ||||
|     <style> | ||||
|     .day-schedule *, .day-schedule select { | ||||
|         display: inline-block; | ||||
|     } | ||||
|  | ||||
|     .day-schedule label[for*="time_schedule_limit-"][for$="-enabled"] { | ||||
|         min-width: 6rem; | ||||
|         font-weight: bold; | ||||
|     } | ||||
|     .day-schedule label { | ||||
|         font-weight: normal; | ||||
|     } | ||||
|  | ||||
|     .day-schedule table label { | ||||
|         padding-left: 0.5rem; | ||||
|         padding-right: 0.5rem; | ||||
|     } | ||||
|     #timespan-warning, input[id*='time_schedule_limit-timezone'].error { | ||||
|         color: #ff0000; | ||||
|     } | ||||
|     .day-schedule.warning table { | ||||
|         background-color: #ffbbc2; | ||||
|     } | ||||
|     ul#day-wrapper { | ||||
|         list-style: none; | ||||
|     } | ||||
|     #timezone-info > * { | ||||
|         display: inline-block; | ||||
|     } | ||||
|  | ||||
|     #scheduler-icon-label { | ||||
|         background-position: left center; | ||||
|         background-repeat: no-repeat; | ||||
|         background-size: contain; | ||||
|         display: inline-block; | ||||
|         vertical-align: middle; | ||||
|         padding-left: 50px; | ||||
|         background-image: url({{ url_for('static_content', group='images', filename='schedule.svg') }}); | ||||
|     } | ||||
|     #timespan-warning { | ||||
|         display: none; | ||||
|     } | ||||
|     </style> | ||||
|     <br> | ||||
|  | ||||
|     {% if timezone_default_config %} | ||||
|     <div> | ||||
|         <span id="scheduler-icon-label" style=""> | ||||
|             {{ render_checkbox_field(form.time_schedule_limit.enabled) }} | ||||
|             <div class="pure-form-message-inline"> | ||||
|                 Set a hourly/week day schedule | ||||
|             </div> | ||||
|         </span> | ||||
|  | ||||
|     </div> | ||||
|     <br> | ||||
|     <div id="schedule-day-limits-wrapper"> | ||||
|         <label>Schedule time limits</label><a data-template="business-hours" | ||||
|                                               class="set-schedule pure-button button-secondary button-xsmall">Business | ||||
|         hours</a> | ||||
|         <a data-template="weekend" class="set-schedule pure-button button-secondary button-xsmall">Weekends</a> | ||||
|         <a data-template="reset" class="set-schedule pure-button button-xsmall">Reset</a><br> | ||||
|         <br> | ||||
|  | ||||
|         <ul id="day-wrapper"> | ||||
|             {% for day in ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday'] %} | ||||
|                 <li class="day-schedule" id="schedule-{{ day }}"> | ||||
|                     {{ render_nolabel_field(form.time_schedule_limit[day]) }} | ||||
|                 </li> | ||||
|             {% endfor %} | ||||
|             <li id="timespan-warning">Warning, one or more of your 'days' has a duration that would extend into the next day.<br> | ||||
|             This could have unintended consequences.</li> | ||||
|             <li id="timezone-info"> | ||||
|                 {{ render_field(form.time_schedule_limit.timezone, placeholder=timezone_default_config) }} <span id="local-time-in-tz"></span> | ||||
|                 <datalist id="timezones" style="display: none;"> | ||||
|                     {% for timezone in available_timezones %} | ||||
|                         <option value="{{ timezone }}">{{ timezone }}</option> | ||||
|                     {% endfor %} | ||||
|                 </datalist> | ||||
|             </li> | ||||
|         </ul> | ||||
|     <br> | ||||
|         <span class="pure-form-message-inline"> | ||||
|          <a href="https://changedetection.io/tutorials">More help and examples about using the scheduler</a> | ||||
|         </span> | ||||
|     </div> | ||||
|     {% else %} | ||||
|         <span class="pure-form-message-inline"> | ||||
|             Want to use a time schedule? <a href="{{url_for('settings_page')}}#timedate">First confirm/save your Time Zone Settings</a> | ||||
|         </span> | ||||
|         <br> | ||||
|     {% endif %} | ||||
|  | ||||
| {% endmacro %} | ||||
| @@ -35,7 +35,9 @@ | ||||
|  | ||||
|   <body class=""> | ||||
|     <div class="header"> | ||||
|       <div class="home-menu pure-menu pure-menu-horizontal pure-menu-fixed" id="nav-menu"> | ||||
|     <div class="pure-menu-fixed" style="width: 100%;"> | ||||
|       <div class="home-menu pure-menu pure-menu-horizontal" id="nav-menu"> | ||||
|  | ||||
|         {% if has_password and not current_user.is_authenticated %} | ||||
|           <a class="pure-menu-heading" href="https://changedetection.io" rel="noopener"> | ||||
|             <strong>Change</strong>Detection.io</a> | ||||
| @@ -68,7 +70,7 @@ | ||||
|                 <a href="{{ url_for('import_page')}}" class="pure-menu-link">IMPORT</a> | ||||
|               </li> | ||||
|               <li class="pure-menu-item"> | ||||
|                 <a href="{{ url_for('get_backup')}}" class="pure-menu-link">BACKUP</a> | ||||
|                 <a href="{{ url_for('backups.index')}}" class="pure-menu-link">BACKUPS</a> | ||||
|               </li> | ||||
|             {% else %} | ||||
|               <li class="pure-menu-item"> | ||||
| @@ -129,7 +131,12 @@ | ||||
|           </li> | ||||
|         </ul> | ||||
|       </div> | ||||
|       <div id="pure-menu-horizontal-spinner"></div> | ||||
|       </div> | ||||
|  | ||||
|     </div> | ||||
|  | ||||
|  | ||||
|     {% if hosted_sticky %} | ||||
|       <div class="sticky-tab" id="hosted-sticky"> | ||||
|         <a href="https://changedetection.io/?ref={{guid}}">Let us host your instance!</a> | ||||
|   | ||||
| @@ -14,7 +14,7 @@ | ||||
|  | ||||
| <div id="settings"> | ||||
|     <form class="pure-form " action="" method="GET" id="diff-form"> | ||||
|         <fieldset> | ||||
|         <fieldset class="diff-fieldset"> | ||||
|             {% if versions|length >= 1 %} | ||||
|                 <strong>Compare</strong> | ||||
|                 <del class="change"><span>from</span></del> | ||||
| @@ -33,7 +33,7 @@ | ||||
|                         </option> | ||||
|                     {% endfor %} | ||||
|                 </select> | ||||
|                 <button type="submit" class="pure-button pure-button-primary">Go</button> | ||||
|                 <button type="submit" class="pure-button pure-button-primary reset-margin">Go</button> | ||||
|             {% endif %} | ||||
|         </fieldset> | ||||
|         <fieldset> | ||||
|   | ||||
| @@ -1,10 +1,11 @@ | ||||
| {% extends 'base.html' %} | ||||
| {% block content %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form %} | ||||
| {% from '_common_fields.html' import render_common_settings_form %} | ||||
| <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='global-settings.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='scheduler.js')}}" defer></script> | ||||
| <script> | ||||
|     const browser_steps_available_screenshots=JSON.parse('{{ watch.get_browsersteps_available_screenshots|tojson }}'); | ||||
|     const browser_steps_config=JSON.parse('{{ browser_steps_config|tojson }}'); | ||||
| @@ -26,7 +27,6 @@ | ||||
| </script> | ||||
| <script src="{{url_for('static_content', group='js', filename='plugins.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='limit.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='visual-selector.js')}}" defer></script> | ||||
| {% if playwright_enabled %} | ||||
| @@ -45,9 +45,8 @@ | ||||
|             {% if extra_tab_content %} | ||||
|             <li class="tab"><a href="#extras_tab">{{ extra_tab_content }}</a></li> | ||||
|             {% endif %} | ||||
|             {% if playwright_enabled %} | ||||
|             <li class="tab"><a id="browsersteps-tab" href="#browser-steps">Browser Steps</a></li> | ||||
|             {% endif %} | ||||
|         <!-- should goto extra forms? --> | ||||
|             {% if watch['processor'] == 'text_json_diff' %} | ||||
|             <li class="tab"><a id="visualselector-tab" href="#visualselector">Visual Filter Selector</a></li> | ||||
|             <li class="tab" id="filters-and-triggers-tab"><a href="#filters-and-triggers">Filters & Triggers</a></li> | ||||
| @@ -59,15 +58,15 @@ | ||||
|  | ||||
|     <div class="box-wrap inner"> | ||||
|         <form class="pure-form pure-form-stacked" | ||||
|               action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next'), unpause_on_save = request.args.get('unpause_on_save')) }}" method="POST"> | ||||
|               action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next'), unpause_on_save = request.args.get('unpause_on_save'), tag = request.args.get('tag')) }}" method="POST"> | ||||
|              <input type="hidden" name="csrf_token" value="{{ csrf_token() }}"> | ||||
|  | ||||
|             <div class="tab-pane-inner" id="general"> | ||||
|                 <fieldset> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.url, placeholder="https://...", required=true, class="m-d") }} | ||||
|                         <span class="pure-form-message-inline">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></span><br> | ||||
|                         <span class="pure-form-message-inline">You can use variables in the URL, perfect for inserting the current date and other logic, <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a></span><br> | ||||
|                         <div class="pure-form-message">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></div> | ||||
|                         <div class="pure-form-message">Variables are supported in the URL (<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a>).</div> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group inline-radio"> | ||||
|                         {{ render_field(form.processor) }} | ||||
| @@ -80,9 +79,24 @@ | ||||
|                         <span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group time-between-check border-fieldset"> | ||||
|                         {{ render_field(form.time_between_check, class="time-check-widget") }} | ||||
|  | ||||
|                         {{ render_checkbox_field(form.time_between_check_use_default, class="use-default-timecheck") }} | ||||
|                     </div> | ||||
|                         <br> | ||||
|                         <div id="time-check-widget-wrapper"> | ||||
|                             {{ render_field(form.time_between_check, class="time-check-widget") }} | ||||
|  | ||||
|                             <span class="pure-form-message-inline"> | ||||
|                              The interval/amount of time between each check. | ||||
|                             </span> | ||||
|                         </div> | ||||
|                         <div id="time-between-check-schedule"> | ||||
|                             <!-- Start Time and End Time --> | ||||
|                             <div id="limit-between-time"> | ||||
|                                 {{ render_time_schedule_form(form, available_timezones, timezone_default_config) }} | ||||
|                             </div> | ||||
|                         </div> | ||||
| <br> | ||||
|               </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_checkbox_field(form.extract_title_as_title) }} | ||||
|                     </div> | ||||
| @@ -150,21 +164,24 @@ | ||||
|                             {{ render_field(form.method) }} | ||||
|                         </div> | ||||
|                         <div id="request-body"> | ||||
|                                             {{ render_field(form.body, rows=5, placeholder="Example | ||||
|                                             {{ render_field(form.body, rows=7, placeholder="Example | ||||
| { | ||||
|    \"name\":\"John\", | ||||
|    \"age\":30, | ||||
|    \"car\":null | ||||
|    \"car\":null, | ||||
|    \"year\":{% now 'Europe/Berlin', '%Y' %} | ||||
| }") }} | ||||
|                         </div> | ||||
|                         <div class="pure-form-message">Variables are supported in the request body (<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a>).</div> | ||||
|                     </div> | ||||
|                 </fieldset> | ||||
|             <!-- hmm --> | ||||
|                 <div class="pure-control-group advanced-options"  style="display: none;"> | ||||
|                     {{ render_field(form.headers, rows=5, placeholder="Example | ||||
|                     {{ render_field(form.headers, rows=7, placeholder="Example | ||||
| Cookie: foobar | ||||
| User-Agent: wonderbra 1.0") }} | ||||
|  | ||||
| User-Agent: wonderbra 1.0 | ||||
| Math: {{ 1 + 1 }}") }} | ||||
|                         <div class="pure-form-message">Variables are supported in the request header values (<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a>).</div> | ||||
|                         <div class="pure-form-message-inline"> | ||||
|                             {% if has_extra_headers_file %} | ||||
|                                 <strong>Alert! Extra headers file found and will be added to this watch!</strong> | ||||
| @@ -181,8 +198,9 @@ User-Agent: wonderbra 1.0") }} | ||||
|                     </div> | ||||
|             </fieldset> | ||||
|             </div> | ||||
|             {% if playwright_enabled %} | ||||
|  | ||||
|             <div class="tab-pane-inner" id="browser-steps"> | ||||
|             {% if playwright_enabled %} | ||||
|                 <img class="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}" alt="New beta functionality"> | ||||
|                 <fieldset> | ||||
|                     <div class="pure-control-group"> | ||||
| @@ -222,8 +240,16 @@ User-Agent: wonderbra 1.0") }} | ||||
|                         </div> | ||||
|                     </div> | ||||
|                 </fieldset> | ||||
|                 {% else %} | ||||
|                     <span class="pure-form-message-inline"> | ||||
|                         <p>Sorry, this functionality only works with Playwright/Chrome enabled watches.</p> | ||||
|                         <p>Enable the Playwright Chrome fetcher, or alternatively try our <a href="https://lemonade.changedetection.io/start">very affordable subscription based service</a>.</p> | ||||
|                         <p>This is because Selenium/WebDriver can not extract full page screenshots reliably.</p> | ||||
|                         <p>You may need to <a href="https://github.com/dgtlmoon/changedetection.io/blob/09ebc6ec6338545bdd694dc6eee57f2e9d2b8075/docker-compose.yml#L31">Enable playwright environment variable</a> and uncomment the <strong>sockpuppetbrowser</strong> from the docker-compose.yml file.</p> | ||||
|                     </span> | ||||
|                 {% endif %} | ||||
|             </div> | ||||
|             {% endif %} | ||||
|  | ||||
|  | ||||
|             <div class="tab-pane-inner" id="notifications"> | ||||
|                 <fieldset> | ||||
| @@ -330,9 +356,9 @@ nav | ||||
|                         {{ render_checkbox_field(form.filter_text_added) }} | ||||
|                         {{ render_checkbox_field(form.filter_text_replaced) }} | ||||
|                         {{ render_checkbox_field(form.filter_text_removed) }} | ||||
|                     <span class="pure-form-message-inline">Note: Depending on the length and similarity of the text on each line, the algorithm may consider an <strong>addition</strong> instead of <strong>replacement</strong> for example.</span> | ||||
|                     <span class="pure-form-message-inline">So it's always better to select <strong>Added</strong>+<strong>Replaced</strong> when you're interested in new content.</span><br> | ||||
|                     <span class="pure-form-message-inline">When content is merely moved in a list, it will also trigger an <strong>addition</strong>, consider enabling <code><strong>Only trigger when unique lines appear</strong></code></span> | ||||
|                     <span class="pure-form-message-inline">Note: Depending on the length and similarity of the text on each line, the algorithm may consider an <strong>addition</strong> instead of <strong>replacement</strong> for example.</span><br> | ||||
|                     <span class="pure-form-message-inline"> So it's always better to select <strong>Added</strong>+<strong>Replaced</strong> when you're interested in new content.</span><br> | ||||
|                     <span class="pure-form-message-inline"> When content is merely moved in a list, it will also trigger an <strong>addition</strong>, consider enabling <code><strong>Only trigger when unique lines appear</strong></code></span> | ||||
|                 </fieldset> | ||||
|                 <fieldset class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.check_unique_lines) }} | ||||
| @@ -371,7 +397,7 @@ nav | ||||
| ") }} | ||||
|                     <span class="pure-form-message-inline"> | ||||
|                         <ul> | ||||
|                             <li>Matching text will be <strong>removed</strong> from the text snapshot</li> | ||||
|                             <li>Matching text will be <strong>ignored</strong> in the text snapshot (you can still see it but it wont trigger a change)</li> | ||||
|                             <li>Each line processed separately, any line matching will be ignored (removed before creating the checksum)</li> | ||||
|                             <li>Regular Expression support, wrap the entire line in forward slash <code>/regex/</code></li> | ||||
|                             <li>Changing this will affect the comparison checksum which may trigger an alert</li> | ||||
| @@ -398,7 +424,9 @@ Unavailable") }} | ||||
|                 </fieldset> | ||||
|                 <fieldset> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.extract_text, rows=5, placeholder="\d+ online") }} | ||||
|                         {{ render_field(form.extract_text, rows=5, placeholder="/.+?\d+ comments.+?/ | ||||
|  or | ||||
| keyword") }} | ||||
|                         <span class="pure-form-message-inline"> | ||||
|                     <ul> | ||||
|                         <li>Extracts text in the final output (line by line) after other filters using regular expressions or string match; | ||||
| @@ -424,14 +452,15 @@ Unavailable") }} | ||||
|                     </script> | ||||
|                     <br> | ||||
|                     {#<div id="text-preview-controls"><span id="text-preview-refresh" class="pure-button button-xsmall">Refresh</span></div>#} | ||||
|  | ||||
|                     <div class="minitabs-wrapper"> | ||||
|                         <div id="text-preview-inner" class="monospace-preview"> | ||||
|                             <p>Loading...</p> | ||||
|                         </div> | ||||
|                         <div id="text-preview-before-inner" style="display: none;" class="monospace-preview"> | ||||
|                             <p>Loading...</p> | ||||
|                         </div> | ||||
|                       <div class="minitabs-content"> | ||||
|                           <div id="text-preview-inner" class="monospace-preview"> | ||||
|                               <p>Loading...</p> | ||||
|                           </div> | ||||
|                           <div id="text-preview-before-inner" style="display: none;" class="monospace-preview"> | ||||
|                               <p>Loading...</p> | ||||
|                           </div> | ||||
|                       </div> | ||||
|                     </div> | ||||
|             </div> | ||||
|           </div> | ||||
| @@ -472,6 +501,7 @@ Unavailable") }} | ||||
|                                 <p>Sorry, this functionality only works with Playwright/Chrome enabled watches.</p> | ||||
|                                 <p>Enable the Playwright Chrome fetcher, or alternatively try our <a href="https://lemonade.changedetection.io/start">very affordable subscription based service</a>.</p> | ||||
|                                 <p>This is because Selenium/WebDriver can not extract full page screenshots reliably.</p> | ||||
|                                 <p>You may need to <a href="https://github.com/dgtlmoon/changedetection.io/blob/09ebc6ec6338545bdd694dc6eee57f2e9d2b8075/docker-compose.yml#L31">Enable playwright environment variable</a> and uncomment the <strong>sockpuppetbrowser</strong> from the docker-compose.yml file.</p> | ||||
|                             </span> | ||||
|                         {% endif %} | ||||
|                     </div> | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| {% extends 'base.html' %} | ||||
|  | ||||
| {% block content %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form %} | ||||
| {% from '_common_fields.html' import render_common_settings_form %} | ||||
| <script> | ||||
|     const notification_base_url="{{url_for('ajax_callback_send_notification_test', mode="global-settings")}}"; | ||||
| @@ -10,9 +10,11 @@ | ||||
| {% endif %} | ||||
| </script> | ||||
| <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='plugins.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='global-settings.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='scheduler.js')}}" defer></script> | ||||
| <div class="edit-form"> | ||||
|     <div class="tabs collapsable"> | ||||
|         <ul> | ||||
| @@ -21,6 +23,7 @@ | ||||
|             <li class="tab"><a href="#fetching">Fetching</a></li> | ||||
|             <li class="tab"><a href="#filters">Global Filters</a></li> | ||||
|             <li class="tab"><a href="#api">API</a></li> | ||||
|             <li class="tab"><a href="#timedate">Time & Date</a></li> | ||||
|             <li class="tab"><a href="#proxies">CAPTCHA & Proxies</a></li> | ||||
|         </ul> | ||||
|     </div> | ||||
| @@ -32,6 +35,12 @@ | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.requests.form.time_between_check, class="time-check-widget") }} | ||||
|                         <span class="pure-form-message-inline">Default recheck time for all watches, current system minimum is <i>{{min_system_recheck_seconds}}</i> seconds (<a href="https://github.com/dgtlmoon/changedetection.io/wiki/Misc-system-settings#enviroment-variables">more info</a>).</span> | ||||
|                             <div id="time-between-check-schedule"> | ||||
|                                 <!-- Start Time and End Time --> | ||||
|                                 <div id="limit-between-time"> | ||||
|                                     {{ render_time_schedule_form(form.requests, available_timezones, timezone_default_config) }} | ||||
|                                 </div> | ||||
|                         </div> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.requests.form.jitter_seconds, class="jitter_seconds") }} | ||||
| @@ -172,7 +181,7 @@ nav | ||||
|                     <span class="pure-form-message-inline">Note: This is applied globally in addition to the per-watch rules.</span><br> | ||||
|                     <span class="pure-form-message-inline"> | ||||
|                         <ul> | ||||
|                             <li>Matching text will be <strong>removed</strong> from the text snapshot</li> | ||||
|                             <li>Matching text will be <strong>ignored</strong> in the text snapshot (you can still see it but it wont trigger a change)</li> | ||||
|                             <li>Note: This is applied globally in addition to the per-watch rules.</li> | ||||
|                             <li>Each line processed separately, any line matching will be ignored (removed before creating the checksum)</li> | ||||
|                             <li>Regular Expression support, wrap the entire line in forward slash <code>/regex/</code></li> | ||||
| @@ -211,6 +220,23 @@ nav | ||||
|                     </p> | ||||
|                 </div> | ||||
|             </div> | ||||
|             <div class="tab-pane-inner" id="timedate"> | ||||
|                 <div class="pure-control-group"> | ||||
|                     Ensure the settings below are correct, they are used to manage the time schedule for checking your web page watches. | ||||
|                 </div> | ||||
|                 <div class="pure-control-group"> | ||||
|                     <p><strong>UTC Time & Date from Server:</strong> <span id="utc-time" >{{ utc_time }}</span></p> | ||||
|                     <p><strong>Local Time & Date in Browser:</strong> <span class="local-time" data-utc="{{ utc_time }}"></span></p> | ||||
|                     <p> | ||||
|                        {{ render_field(form.application.form.timezone) }} | ||||
|                         <datalist id="timezones" style="display: none;"> | ||||
|                             {% for tz_name in available_timezones %} | ||||
|                                 <option value="{{ tz_name }}">{{ tz_name }}</option> | ||||
|                             {% endfor %} | ||||
|                         </datalist> | ||||
|                     </p> | ||||
|                 </div> | ||||
|             </div> | ||||
|             <div class="tab-pane-inner" id="proxies"> | ||||
|                 <div id="recommended-proxy"> | ||||
|                     <div> | ||||
| @@ -254,9 +280,7 @@ nav | ||||
|                          | ||||
|                     </div> | ||||
|                 </div> | ||||
|                 <p> | ||||
|                     Your proxy provider may need to whitelist our IP of <code>204.15.192.195</code> | ||||
|                 </p> | ||||
|  | ||||
|                <p><strong>Tip</strong>: "Residential" and "Mobile" proxy type can be more successfull than "Data Center" for blocked websites. | ||||
|  | ||||
|                 <div class="pure-control-group" id="extra-proxies-setting"> | ||||
| @@ -276,7 +300,7 @@ nav | ||||
|                 <div class="pure-control-group"> | ||||
|                     {{ render_button(form.save_button) }} | ||||
|                     <a href="{{url_for('index')}}" class="pure-button button-small button-cancel">Back</a> | ||||
|                     <a href="{{url_for('clear_all_history')}}" class="pure-button button-small button-cancel">Clear Snapshot History</a> | ||||
|                     <a href="{{url_for('clear_all_history')}}" class="pure-button button-small button-error">Clear Snapshot History</a> | ||||
|                 </div> | ||||
|             </div> | ||||
|         </form> | ||||
|   | ||||
| @@ -6,7 +6,7 @@ | ||||
|  | ||||
| <div class="box"> | ||||
|  | ||||
|     <form class="pure-form" action="{{ url_for('form_quick_watch_add') }}" method="POST" id="new-watch-form"> | ||||
|     <form class="pure-form" action="{{ url_for('form_quick_watch_add', tag=active_tag_uuid) }}" method="POST" id="new-watch-form"> | ||||
|         <input type="hidden" name="csrf_token" value="{{ csrf_token() }}" > | ||||
|         <fieldset> | ||||
|             <legend>Add a new change detection watch</legend> | ||||
| @@ -108,7 +108,8 @@ | ||||
|                     {% else %} | ||||
|                     <a class="state-on" href="{{url_for('index', op='pause', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='play.svg')}}" alt="UnPause checks" title="UnPause checks" class="icon icon-unpause" ></a> | ||||
|                     {% endif %} | ||||
|                     <a class="link-mute state-{{'on' if watch.notification_muted else 'off'}}" href="{{url_for('index', op='mute', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications" class="icon icon-mute" ></a> | ||||
|                     {% set mute_label = 'UnMute notification' if watch.notification_muted else 'Mute notification' %} | ||||
|                     <a class="link-mute state-{{'on' if watch.notification_muted else 'off'}}" href="{{url_for('index', op='mute', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="{{ mute_label }}" title="{{ mute_label }}" class="icon icon-mute" ></a> | ||||
|                 </td> | ||||
|                 <td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}} | ||||
|                     <a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}"></a> | ||||
| @@ -187,11 +188,11 @@ | ||||
|                 <td> | ||||
|                     <a {% if watch.uuid in queued_uuids %}disabled="true"{% endif %} href="{{ url_for('form_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}" | ||||
|                        class="recheck pure-button pure-button-primary">{% if watch.uuid in queued_uuids %}Queued{% else %}Recheck{% endif %}</a> | ||||
|                     <a href="{{ url_for('edit_page', uuid=watch.uuid)}}#general" class="pure-button pure-button-primary">Edit</a> | ||||
|                     <a href="{{ url_for('edit_page', uuid=watch.uuid, tag=active_tag_uuid)}}#general" class="pure-button pure-button-primary">Edit</a> | ||||
|                     {% if watch.history_n >= 2 %} | ||||
|  | ||||
|                         {%  if is_unviewed %} | ||||
|                            <a href="{{ url_for('diff_history_page', uuid=watch.uuid, from_version=watch.get_next_snapshot_key_to_last_viewed) }}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">History</a> | ||||
|                            <a href="{{ url_for('diff_history_page', uuid=watch.uuid, from_version=watch.get_from_version_based_on_last_viewed) }}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">History</a> | ||||
|                         {% else %} | ||||
|                            <a href="{{ url_for('diff_history_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">History</a> | ||||
|                         {% endif %} | ||||
|   | ||||
| @@ -34,7 +34,7 @@ def test_execute_custom_js(client, live_server, measure_memory_usage): | ||||
|     assert b"unpaused" in res.data | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid].history_n >= 1, "Watch history had atleast 1 (everything fetched OK)" | ||||
|  | ||||
|     assert b"This text should be removed" not in res.data | ||||
|   | ||||
							
								
								
									
										6
									
								
								changedetectionio/tests/itemprop_test_examples/README.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								changedetectionio/tests/itemprop_test_examples/README.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | ||||
| # A list of real world examples! | ||||
|  | ||||
| Always the price should be 666.66 for our tests | ||||
|  | ||||
| see test_restock_itemprop.py::test_special_prop_examples | ||||
|  | ||||
							
								
								
									
										25
									
								
								changedetectionio/tests/itemprop_test_examples/a.txt
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										25
									
								
								changedetectionio/tests/itemprop_test_examples/a.txt
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,25 @@ | ||||
| <div class="PriceSection PriceSection_PriceSection__Vx1_Q PriceSection_variantHuge__P9qxg PdpPriceSection" | ||||
|      data-testid="price-section" | ||||
|      data-optly-product-tile-price-section="true"><span | ||||
|         class="PriceRange ProductPrice variant-huge" itemprop="offers" | ||||
|         itemscope="" itemtype="http://schema.org/Offer"><div | ||||
|         class="VisuallyHidden_VisuallyHidden__VBD83">$155.55</div><span | ||||
|         aria-hidden="true" class="Price variant-huge" data-testid="price" | ||||
|         itemprop="price"><sup class="sup" data-testid="price-symbol" | ||||
|                               itemprop="priceCurrency" content="AUD">$</sup><span | ||||
|         class="dollars" data-testid="price-value" itemprop="price" | ||||
|         content="155.55">155.55</span><span class="extras"><span class="sup" | ||||
|                                                               data-testid="price-sup"></span></span></span></span> | ||||
| </div> | ||||
|  | ||||
| <script type="application/ld+json">{ | ||||
|                                 "@type": "Product", | ||||
|                                 "@context": "https://schema.org", | ||||
|                                 "name": "test", | ||||
|                                 "description": "test", | ||||
|                                 "offers": { | ||||
|                                     "@type": "Offer", | ||||
|                                     "priceCurrency": "AUD", | ||||
|                                     "price": 155.55 | ||||
|                                 }, | ||||
|                             }</script> | ||||
| @@ -48,7 +48,7 @@ def test_noproxy_option(client, live_server, measure_memory_usage): | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Watch added in Paused state, saving will unpause" in res.data | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     res = client.get( | ||||
|         url_for("edit_page", uuid=uuid, unpause_on_save=1)) | ||||
|     assert b'No proxy' in res.data | ||||
|   | ||||
| @@ -16,4 +16,4 @@ def test_check_basic_change_detection_functionality(client, live_server, measure | ||||
|     ) | ||||
|  | ||||
|     assert b"1 Imported" in res.data | ||||
|     time.sleep(3) | ||||
|     wait_for_all_checks(client) | ||||
|   | ||||
| @@ -1,7 +1,8 @@ | ||||
| #!/usr/bin/env python3 | ||||
| import json | ||||
| import os | ||||
| from flask import url_for | ||||
| from changedetectionio.tests.util import live_server_setup, wait_for_all_checks | ||||
| from changedetectionio.tests.util import live_server_setup, wait_for_all_checks, extract_UUID_from_client | ||||
|  | ||||
|  | ||||
| def set_response(): | ||||
| @@ -18,7 +19,6 @@ def set_response(): | ||||
|         f.write(data) | ||||
|     time.sleep(1) | ||||
|  | ||||
|  | ||||
| def test_socks5(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|     set_response() | ||||
| @@ -79,3 +79,24 @@ def test_socks5(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     # Should see the proper string | ||||
|     assert "Awesome, you made it".encode('utf-8') in res.data | ||||
|  | ||||
|     # PROXY CHECKER WIDGET CHECK - this needs more checking | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|  | ||||
|     res = client.get( | ||||
|         url_for("check_proxies.start_check", uuid=uuid), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     # It's probably already finished super fast :( | ||||
|     #assert b"RUNNING" in res.data | ||||
|      | ||||
|     wait_for_all_checks(client) | ||||
|     res = client.get( | ||||
|         url_for("check_proxies.get_recheck_status", uuid=uuid), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"OK" in res.data | ||||
|  | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
|   | ||||
| @@ -1,9 +1,9 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import os.path | ||||
| import time | ||||
|  | ||||
| from flask import url_for | ||||
| from .util import live_server_setup, wait_for_all_checks, wait_for_notification_endpoint_output | ||||
| from changedetectionio import html_tools | ||||
|  | ||||
|  | ||||
| def set_original(excluding=None, add_line=None): | ||||
| @@ -77,6 +77,8 @@ def test_check_removed_line_contains_trigger(client, live_server, measure_memory | ||||
|  | ||||
|     # The trigger line is REMOVED,  this should trigger | ||||
|     set_original(excluding='The golden line') | ||||
|  | ||||
|     # Check in the processor here what's going on, its triggering empty-reply and no change. | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     wait_for_all_checks(client) | ||||
|     res = client.get(url_for("index")) | ||||
| @@ -111,7 +113,8 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa | ||||
|     res = client.post( | ||||
|         url_for("settings_page"), | ||||
|         data={"application-notification_title": "New ChangeDetection.io Notification - {{ watch_url }}", | ||||
|               "application-notification_body": 'triggered text was -{{triggered_text}}- 网站监测 内容更新了', | ||||
|               # triggered_text will contain multiple lines | ||||
|               "application-notification_body": 'triggered text was -{{triggered_text}}- ### 网站监测 内容更新了 ####', | ||||
|               # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation | ||||
|               "application-notification_urls": test_notification_url, | ||||
|               "application-minutes_between_check": 180, | ||||
| @@ -151,7 +154,6 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa | ||||
|  | ||||
|     # A line thats not the trigger should not trigger anything | ||||
|     res = client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|  | ||||
|     assert b'1 watches queued for rechecking.' in res.data | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
| @@ -170,9 +172,8 @@ def test_check_add_line_contains_trigger(client, live_server, measure_memory_usa | ||||
|     assert os.path.isfile("test-datastore/notification.txt"), "Notification fired because I can see the output file" | ||||
|     with open("test-datastore/notification.txt", 'rb') as f: | ||||
|         response = f.read() | ||||
|         assert b'-Oh yes please-' in response | ||||
|         assert b'-Oh yes please' in response | ||||
|         assert '网站监测 内容更新了'.encode('utf-8') in response | ||||
|  | ||||
|  | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|   | ||||
| @@ -44,7 +44,6 @@ def set_modified_response(): | ||||
|  | ||||
|     return None | ||||
|  | ||||
|  | ||||
| def is_valid_uuid(val): | ||||
|     try: | ||||
|         uuid.UUID(str(val)) | ||||
| @@ -56,8 +55,9 @@ def is_valid_uuid(val): | ||||
| def test_setup(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
|  | ||||
| def test_api_simple(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
| #    live_server_setup(live_server) | ||||
|  | ||||
|     api_key = extract_api_key_from_UI(client) | ||||
|  | ||||
| @@ -129,6 +129,9 @@ def test_api_simple(client, live_server, measure_memory_usage): | ||||
|     assert after_recheck_info['last_checked'] != before_recheck_info['last_checked'] | ||||
|     assert after_recheck_info['last_changed'] != 0 | ||||
|  | ||||
|     # #2877 When run in a slow fetcher like playwright etc | ||||
|     assert after_recheck_info['last_changed'] ==  after_recheck_info['last_checked'] | ||||
|  | ||||
|     # Check history index list | ||||
|     res = client.get( | ||||
|         url_for("watchhistory", uuid=watch_uuid), | ||||
|   | ||||
| @@ -99,7 +99,7 @@ def test_check_ldjson_price_autodetect(client, live_server, measure_memory_usage | ||||
|     assert b'ldjson-price-track-offer' in res.data | ||||
|  | ||||
|     # Accept it | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     #time.sleep(1) | ||||
|     client.get(url_for('price_data_follower.accept', uuid=uuid, follow_redirects=True)) | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|   | ||||
| @@ -2,7 +2,6 @@ | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| from urllib.request import urlopen | ||||
| from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, \ | ||||
|     extract_UUID_from_client | ||||
|  | ||||
| @@ -69,7 +68,7 @@ def test_check_basic_change_detection_functionality(client, live_server, measure | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|  | ||||
|     # Check the 'get latest snapshot works' | ||||
|     res = client.get(url_for("watch_get_latest_html", uuid=uuid)) | ||||
|   | ||||
| @@ -26,8 +26,24 @@ def test_backup(client, live_server, measure_memory_usage): | ||||
|     assert b"1 Imported" in res.data | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # Launch the thread in the background to create the backup | ||||
|     res = client.get( | ||||
|         url_for("get_backup"), | ||||
|         url_for("backups.request_backup"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     time.sleep(2) | ||||
|  | ||||
|     res = client.get( | ||||
|         url_for("backups.index"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     # Can see the download link to the backup | ||||
|     assert b'<a href="/backups/download/changedetection-backup-20' in res.data | ||||
|     assert b'Remove backups' in res.data | ||||
|  | ||||
|     # Get the latest one | ||||
|     res = client.get( | ||||
|         url_for("backups.download_backup", filename="latest"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
| @@ -44,3 +60,11 @@ def test_backup(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     # Should be two txt files in the archive (history and the snapshot) | ||||
|     assert len(newlist) == 2 | ||||
|  | ||||
|     # Get the latest one | ||||
|     res = client.get( | ||||
|         url_for("backups.remove_backups"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b'No backups found.' in res.data | ||||
| @@ -65,11 +65,8 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu | ||||
|     live_server_setup(live_server) | ||||
|     # Use a mix of case in ZzZ to prove it works case-insensitive. | ||||
|     ignore_text = "out of stoCk\r\nfoobar" | ||||
|  | ||||
|     set_original_ignore_response() | ||||
|  | ||||
|     # Give the endpoint time to spin up | ||||
|     time.sleep(1) | ||||
|  | ||||
|     # Add our URL to the import page | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
| @@ -127,13 +124,24 @@ def test_check_block_changedetection_text_NOT_present(client, live_server, measu | ||||
|     assert b'unviewed' not in res.data | ||||
|     assert b'/test-endpoint' in res.data | ||||
|  | ||||
|     # 2548 | ||||
|     # Going back to the ORIGINAL should NOT trigger a change | ||||
|     set_original_ignore_response() | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     wait_for_all_checks(client) | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b'unviewed' not in res.data | ||||
|  | ||||
|     # Now we set a change where the text is gone, it should now trigger | ||||
|  | ||||
|     # Now we set a change where the text is gone AND its different content, it should now trigger | ||||
|     set_modified_response_minus_block_text() | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     wait_for_all_checks(client) | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b'unviewed' in res.data | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|   | ||||
| @@ -125,8 +125,7 @@ def test_check_markup_include_filters_restriction(client, live_server, measure_m | ||||
|  | ||||
| # Tests the whole stack works with the CSS Filter | ||||
| def test_check_multiple_filters(client, live_server, measure_memory_usage): | ||||
|     sleep_time_for_fetch_thread = 3 | ||||
|  | ||||
|     #live_server_setup(live_server) | ||||
|     include_filters = "#blob-a\r\nxpath://*[contains(@id,'blob-b')]" | ||||
|  | ||||
|     with open("test-datastore/endpoint-content.txt", "w") as f: | ||||
| @@ -138,9 +137,6 @@ def test_check_multiple_filters(client, live_server, measure_memory_usage): | ||||
|      </html> | ||||
|     """) | ||||
|  | ||||
|     # Give the endpoint time to spin up | ||||
|     time.sleep(1) | ||||
|  | ||||
|     # Add our URL to the import page | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
|     res = client.post( | ||||
| @@ -149,7 +145,7 @@ def test_check_multiple_filters(client, live_server, measure_memory_usage): | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"1 Imported" in res.data | ||||
|     time.sleep(1) | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # Goto the edit page, add our ignore text | ||||
|     # Add our URL to the import page | ||||
| @@ -165,7 +161,7 @@ def test_check_multiple_filters(client, live_server, measure_memory_usage): | ||||
|     assert b"Updated watch." in res.data | ||||
|  | ||||
|     # Give the thread time to pick it up | ||||
|     time.sleep(sleep_time_for_fetch_thread) | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     res = client.get( | ||||
|         url_for("preview_page", uuid="first"), | ||||
|   | ||||
| @@ -5,12 +5,41 @@ import time | ||||
| from flask import url_for | ||||
|  | ||||
| from ..html_tools import * | ||||
| from .util import live_server_setup | ||||
| from .util import live_server_setup, wait_for_all_checks | ||||
|  | ||||
|  | ||||
| def test_setup(live_server): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
| def set_response_with_multiple_index(): | ||||
|     data= """<!DOCTYPE html> | ||||
| <html> | ||||
| <body> | ||||
|  | ||||
| <!-- NOTE!! CHROME WILL ADD TBODY HERE IF ITS NOT THERE!! --> | ||||
| <table style="width:100%"> | ||||
|   <tr> | ||||
|     <th>Person 1</th> | ||||
|     <th>Person 2</th> | ||||
|     <th>Person 3</th> | ||||
|   </tr> | ||||
|   <tr> | ||||
|     <td>Emil</td> | ||||
|     <td>Tobias</td> | ||||
|     <td>Linus</td> | ||||
|   </tr> | ||||
|   <tr> | ||||
|     <td>16</td> | ||||
|     <td>14</td> | ||||
|     <td>10</td> | ||||
|   </tr> | ||||
| </table> | ||||
| </body> | ||||
| </html> | ||||
| """ | ||||
|     with open("test-datastore/endpoint-content.txt", "w") as f: | ||||
|         f.write(data) | ||||
|  | ||||
|  | ||||
| def set_original_response(): | ||||
|     test_return_data = """<html> | ||||
| @@ -119,12 +148,10 @@ across multiple lines | ||||
|  | ||||
|  | ||||
| def test_element_removal_full(client, live_server, measure_memory_usage): | ||||
|     sleep_time_for_fetch_thread = 3 | ||||
|     #live_server_setup(live_server) | ||||
|  | ||||
|     set_original_response() | ||||
|  | ||||
|     # Give the endpoint time to spin up | ||||
|     time.sleep(1) | ||||
|  | ||||
|     # Add our URL to the import page | ||||
|     test_url = url_for("test_endpoint", _external=True) | ||||
| @@ -132,7 +159,8 @@ def test_element_removal_full(client, live_server, measure_memory_usage): | ||||
|         url_for("import_page"), data={"urls": test_url}, follow_redirects=True | ||||
|     ) | ||||
|     assert b"1 Imported" in res.data | ||||
|     time.sleep(1) | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # Goto the edit page, add the filter data | ||||
|     # Not sure why \r needs to be added - absent of the #changetext this is not necessary | ||||
|     subtractive_selectors_data = "header\r\nfooter\r\nnav\r\n#changetext" | ||||
| @@ -148,6 +176,7 @@ def test_element_removal_full(client, live_server, measure_memory_usage): | ||||
|         follow_redirects=True, | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # Check it saved | ||||
|     res = client.get( | ||||
| @@ -156,10 +185,10 @@ def test_element_removal_full(client, live_server, measure_memory_usage): | ||||
|     assert bytes(subtractive_selectors_data.encode("utf-8")) in res.data | ||||
|  | ||||
|     # Trigger a check | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     res = client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     assert b'1 watches queued for rechecking.' in res.data | ||||
|  | ||||
|     # Give the thread time to pick it up | ||||
|     time.sleep(sleep_time_for_fetch_thread) | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # so that we set the state to 'unviewed' after all the edits | ||||
|     client.get(url_for("diff_history_page", uuid="first")) | ||||
| @@ -168,11 +197,70 @@ def test_element_removal_full(client, live_server, measure_memory_usage): | ||||
|     set_modified_response() | ||||
|  | ||||
|     # Trigger a check | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     res = client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     assert b'1 watches queued for rechecking.' in res.data | ||||
|  | ||||
|     # Give the thread time to pick it up | ||||
|     time.sleep(sleep_time_for_fetch_thread) | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # There should not be an unviewed change, as changes should be removed | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b"unviewed" not in res.data | ||||
|  | ||||
| # Re #2752 | ||||
| def test_element_removal_nth_offset_no_shift(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|  | ||||
|     set_response_with_multiple_index() | ||||
|     subtractive_selectors_data = [""" | ||||
| body > table > tr:nth-child(1) > th:nth-child(2) | ||||
| body > table >  tr:nth-child(2) > td:nth-child(2) | ||||
| body > table > tr:nth-child(3) > td:nth-child(2) | ||||
| body > table > tr:nth-child(1) > th:nth-child(3) | ||||
| body > table >  tr:nth-child(2) > td:nth-child(3) | ||||
| body > table > tr:nth-child(3) > td:nth-child(3)""", | ||||
| """//body/table/tr[1]/th[2] | ||||
| //body/table/tr[2]/td[2] | ||||
| //body/table/tr[3]/td[2] | ||||
| //body/table/tr[1]/th[3] | ||||
| //body/table/tr[2]/td[3] | ||||
| //body/table/tr[3]/td[3]"""] | ||||
|  | ||||
|     for selector_list in subtractive_selectors_data: | ||||
|  | ||||
|         res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|         assert b'Deleted' in res.data | ||||
|  | ||||
|         # Add our URL to the import page | ||||
|         test_url = url_for("test_endpoint", _external=True) | ||||
|         res = client.post( | ||||
|             url_for("import_page"), data={"urls": test_url}, follow_redirects=True | ||||
|         ) | ||||
|         assert b"1 Imported" in res.data | ||||
|         wait_for_all_checks(client) | ||||
|  | ||||
|         res = client.post( | ||||
|             url_for("edit_page", uuid="first"), | ||||
|             data={ | ||||
|                 "subtractive_selectors": selector_list, | ||||
|                 "url": test_url, | ||||
|                 "tags": "", | ||||
|                 "fetch_backend": "html_requests", | ||||
|             }, | ||||
|             follow_redirects=True, | ||||
|         ) | ||||
|         assert b"Updated watch." in res.data | ||||
|         wait_for_all_checks(client) | ||||
|  | ||||
|         res = client.get( | ||||
|             url_for("preview_page", uuid="first"), | ||||
|             follow_redirects=True | ||||
|         ) | ||||
|  | ||||
|         assert b"Tobias" not in res.data | ||||
|         assert b"Linus" not in res.data | ||||
|         assert b"Person 2" not in res.data | ||||
|         assert b"Person 3" not in res.data | ||||
|         # First column should exist | ||||
|         assert b"Emil" in res.data | ||||
|  | ||||
|   | ||||
| @@ -40,7 +40,7 @@ def test_check_encoding_detection(client, live_server, measure_memory_usage): | ||||
|  | ||||
|  | ||||
|     # Content type recording worked | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid]['content-type'] == "text/html" | ||||
|  | ||||
|     res = client.get( | ||||
|   | ||||
| @@ -71,7 +71,7 @@ def test_setup(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
| def test_check_filter_multiline(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|    # live_server_setup(live_server) | ||||
|     set_multiline_response() | ||||
|  | ||||
|     # Add our URL to the import page | ||||
|   | ||||
| @@ -51,7 +51,7 @@ def run_filter_test(client, live_server, content_filter): | ||||
|     assert b"1 Imported" in res.data | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|  | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid]['consecutive_filter_failures'] == 0, "No filter = No filter failure" | ||||
|  | ||||
|   | ||||
| @@ -288,7 +288,7 @@ def test_clone_tag_on_import(client, live_server, measure_memory_usage): | ||||
|     assert b'test-tag' in res.data | ||||
|     assert b'another-tag' in res.data | ||||
|  | ||||
|     watch_uuid = extract_UUID_from_client(client) | ||||
|     watch_uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True) | ||||
|  | ||||
|     assert b'Cloned' in res.data | ||||
| @@ -315,7 +315,7 @@ def test_clone_tag_on_quickwatchform_add(client, live_server, measure_memory_usa | ||||
|     assert b'test-tag' in res.data | ||||
|     assert b'another-tag' in res.data | ||||
|  | ||||
|     watch_uuid = extract_UUID_from_client(client) | ||||
|     watch_uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True) | ||||
|  | ||||
|     assert b'Cloned' in res.data | ||||
|   | ||||
| @@ -36,7 +36,7 @@ def test_ignore(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     # Give the thread time to pick it up | ||||
|     wait_for_all_checks(client) | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     # use the highlighter endpoint | ||||
|     res = client.post( | ||||
|         url_for("highlight_submit_ignore_url", uuid=uuid), | ||||
|   | ||||
| @@ -33,13 +33,17 @@ def test_strip_regex_text_func(): | ||||
|  | ||||
|     stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines) | ||||
|  | ||||
|     assert b"but 1 lines" in stripped_content | ||||
|     assert b"igNORe-cAse text" not in stripped_content | ||||
|     assert b"but 1234 lines" not in stripped_content | ||||
|     assert b"really" not in stripped_content | ||||
|     assert b"not this" not in stripped_content | ||||
|     assert "but 1 lines" in stripped_content | ||||
|     assert "igNORe-cAse text" not in stripped_content | ||||
|     assert "but 1234 lines" not in stripped_content | ||||
|     assert "really" not in stripped_content | ||||
|     assert "not this" not in stripped_content | ||||
|  | ||||
|     # Check line number reporting | ||||
|     stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines, mode="line numbers") | ||||
|     assert stripped_content == [2, 5, 6, 7, 8, 10] | ||||
|  | ||||
|     # Check that linefeeds are preserved when there are is no matching ignores | ||||
|     content = "some text\n\nand other text\n" | ||||
|     stripped_content = html_tools.strip_ignore_text(content, ignore_lines) | ||||
|     assert content == stripped_content | ||||
|   | ||||
| @@ -22,10 +22,15 @@ def test_strip_text_func(): | ||||
|     ignore_lines = ["sometimes"] | ||||
|  | ||||
|     stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines) | ||||
|     assert "sometimes" not in stripped_content | ||||
|     assert "Some content" in stripped_content | ||||
|  | ||||
|     assert b"sometimes" not in stripped_content | ||||
|     assert b"Some content" in stripped_content | ||||
|     # Check that line feeds dont get chewed up when something is found | ||||
|     test_content = "Some initial text\n\nWhich is across multiple lines\n\nZZZZz\n\n\nSo let's see what happens." | ||||
|     ignore = ['something irrelevent but just to check', 'XXXXX', 'YYYYY', 'ZZZZZ'] | ||||
|  | ||||
|     stripped_content = html_tools.strip_ignore_text(test_content, ignore) | ||||
|     assert stripped_content == "Some initial text\n\nWhich is across multiple lines\n\n\n\nSo let's see what happens." | ||||
|  | ||||
| def set_original_ignore_response(): | ||||
|     test_return_data = """<html> | ||||
| @@ -141,8 +146,6 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|     # Just to be sure.. set a regular modified change.. | ||||
|     set_modified_original_ignore_response() | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
| @@ -153,17 +156,17 @@ def test_check_ignore_text_functionality(client, live_server, measure_memory_usa | ||||
|  | ||||
|     res = client.get(url_for("preview_page", uuid="first")) | ||||
|  | ||||
|     # Should no longer be in the preview | ||||
|     assert b'new ignore stuff' not in res.data | ||||
|     # SHOULD BE be in the preview, it was added in set_modified_original_ignore_response() | ||||
|     # and we have "new ignore stuff" in ignore_text | ||||
|     # it is only ignored, it is not removed (it will be highlighted too) | ||||
|     assert b'new ignore stuff' in res.data | ||||
|  | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
| # When adding some ignore text, it should not trigger a change, even if something else on that line changes | ||||
| def test_check_global_ignore_text_functionality(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     # Give the endpoint time to spin up | ||||
|     time.sleep(1) | ||||
|  | ||||
|     #live_server_setup(live_server) | ||||
|     ignore_text = "XXXXX\r\nYYYYY\r\nZZZZZ" | ||||
|     set_original_ignore_response() | ||||
|  | ||||
| @@ -172,6 +175,7 @@ def test_check_global_ignore_text_functionality(client, live_server, measure_mem | ||||
|         url_for("settings_page"), | ||||
|         data={ | ||||
|             "requests-time_between_check-minutes": 180, | ||||
|             "application-ignore_whitespace": "y", | ||||
|             "application-global_ignore_text": ignore_text, | ||||
|             'application-fetch_backend': "html_requests" | ||||
|         }, | ||||
| @@ -192,9 +196,7 @@ def test_check_global_ignore_text_functionality(client, live_server, measure_mem | ||||
|     # Give the thread time to pick it up | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|  | ||||
|     # Goto the edit page of the item, add our ignore text | ||||
|     # Add our URL to the import page | ||||
|     #Adding some ignore text should not trigger a change | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid="first"), | ||||
|         data={"ignore_text": "something irrelevent but just to check", "url": test_url, 'fetch_backend': "html_requests"}, | ||||
| @@ -210,20 +212,15 @@ def test_check_global_ignore_text_functionality(client, live_server, measure_mem | ||||
|  | ||||
|     # Trigger a check | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|  | ||||
|     # Give the thread time to pick it up | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # so that we are sure everything is viewed and in a known 'nothing changed' state | ||||
|     res = client.get(url_for("diff_history_page", uuid="first")) | ||||
|  | ||||
|     # It should report nothing found (no new 'unviewed' class) | ||||
|     # It should report nothing found (no new 'unviewed' class), adding random ignore text should not cause a change | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b'unviewed' not in res.data | ||||
|     assert b'/test-endpoint' in res.data | ||||
| ##### | ||||
|  | ||||
|  | ||||
|     #  Make a change which includes the ignore text | ||||
|     # Make a change which includes the ignore text, it should be ignored and no 'change' triggered | ||||
|     # It adds text with "ZZZZzzzz" and "ZZZZ" is in the ignore list | ||||
|     set_modified_ignore_response() | ||||
|  | ||||
|     # Trigger a check | ||||
| @@ -233,6 +230,7 @@ def test_check_global_ignore_text_functionality(client, live_server, measure_mem | ||||
|  | ||||
|     # It should report nothing found (no new 'unviewed' class) | ||||
|     res = client.get(url_for("index")) | ||||
|  | ||||
|     assert b'unviewed' not in res.data | ||||
|     assert b'/test-endpoint' in res.data | ||||
|  | ||||
|   | ||||
| @@ -514,3 +514,15 @@ def test_check_jq_ext_filter(client, live_server, measure_memory_usage): | ||||
| def test_check_jqraw_ext_filter(client, live_server, measure_memory_usage): | ||||
|     if jq_support: | ||||
|         check_json_ext_filter('jq:.[] | select(.status | contains("Sold"))', client, live_server) | ||||
|  | ||||
| def test_jsonpath_BOM_utf8(client, live_server, measure_memory_usage): | ||||
|     from .. import html_tools | ||||
|  | ||||
|     # JSON string with BOM and correct double-quoted keys | ||||
|     json_str = '\ufeff{"name": "José", "emoji": "😊", "language": "中文", "greeting": "Привет"}' | ||||
|  | ||||
|     # See that we can find the second <script> one, which is not broken, and matches our filter | ||||
|     text = html_tools.extract_json_as_string(json_str, "json:$.name") | ||||
|     assert text == '"José"' | ||||
|  | ||||
|      | ||||
|   | ||||
							
								
								
									
										78
									
								
								changedetectionio/tests/test_live_preview.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										78
									
								
								changedetectionio/tests/test_live_preview.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,78 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| from flask import url_for | ||||
| from changedetectionio.tests.util import live_server_setup, wait_for_all_checks, extract_UUID_from_client | ||||
|  | ||||
|  | ||||
| def set_response(): | ||||
|  | ||||
|     data = f"""<html> | ||||
|        <body>Awesome, you made it<br> | ||||
| yeah the socks request worked<br> | ||||
| something to ignore<br> | ||||
| something to trigger<br> | ||||
|      </body> | ||||
|      </html> | ||||
|     """ | ||||
|  | ||||
|     with open("test-datastore/endpoint-content.txt", "w") as f: | ||||
|         f.write(data) | ||||
|  | ||||
| def test_content_filter_live_preview(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|     set_response() | ||||
|  | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("form_quick_watch_add"), | ||||
|         data={"url": test_url, "tags": ''}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid=uuid), | ||||
|         data={ | ||||
|             "include_filters": "", | ||||
|             "fetch_backend": 'html_requests', | ||||
|             "ignore_text": "something to ignore", | ||||
|             "trigger_text": "something to trigger", | ||||
|             "url": test_url, | ||||
|         }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # The endpoint is a POST and accepts the form values to override the watch preview | ||||
|     import json | ||||
|  | ||||
|     # DEFAULT OUTPUT WITHOUT ANYTHING UPDATED/CHANGED - SHOULD SEE THE WATCH DEFAULTS | ||||
|     res = client.post( | ||||
|         url_for("watch_get_preview_rendered", uuid=uuid) | ||||
|     ) | ||||
|     default_return = json.loads(res.data.decode('utf-8')) | ||||
|     assert default_return.get('after_filter') | ||||
|     assert default_return.get('before_filter') | ||||
|     assert default_return.get('ignore_line_numbers') == [3] # "something to ignore" line 3 | ||||
|     assert default_return.get('trigger_line_numbers') == [4] # "something to trigger" line 4 | ||||
|  | ||||
|     # SEND AN UPDATE AND WE SHOULD SEE THE OUTPUT CHANGE SO WE KNOW TO HIGHLIGHT NEW STUFF | ||||
|     res = client.post( | ||||
|         url_for("watch_get_preview_rendered", uuid=uuid), | ||||
|         data={ | ||||
|             "include_filters": "", | ||||
|             "fetch_backend": 'html_requests', | ||||
|             "ignore_text": "sOckS", # Also be sure case insensitive works | ||||
|             "trigger_text": "AweSOme", | ||||
|             "url": test_url, | ||||
|         }, | ||||
|     ) | ||||
|     reply = json.loads(res.data.decode('utf-8')) | ||||
|     assert reply.get('after_filter') | ||||
|     assert reply.get('before_filter') | ||||
|     assert reply.get('ignore_line_numbers') == [2]  # Ignored - "socks" on line 2 | ||||
|     assert reply.get('trigger_line_numbers') == [1]  # Triggers "Awesome" in line 1 | ||||
|  | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
| @@ -48,7 +48,7 @@ def test_check_basic_change_detection_functionality(client, live_server, measure | ||||
|     ##################### | ||||
|     client.post( | ||||
|         url_for("settings_page"), | ||||
|         data={"application-empty_pages_are_a_change": "", | ||||
|         data={"application-empty_pages_are_a_change": "", # default, OFF, they are NOT a change | ||||
|               "requests-time_between_check-minutes": 180, | ||||
|               'application-fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
| @@ -66,6 +66,14 @@ def test_check_basic_change_detection_functionality(client, live_server, measure | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b'unviewed' not in res.data | ||||
|  | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     watch = live_server.app.config['DATASTORE'].data['watching'][uuid] | ||||
|  | ||||
|     assert watch.last_changed == 0 | ||||
|     assert watch['last_checked'] != 0 | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|     # ok now do the opposite | ||||
|  | ||||
| @@ -92,6 +100,10 @@ def test_check_basic_change_detection_functionality(client, live_server, measure | ||||
|     # A totally zero byte (#2528) response should also not trigger an error | ||||
|     set_zero_byte_response() | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|  | ||||
|     # 2877 | ||||
|     assert watch.last_changed == watch['last_checked'] | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b'unviewed' in res.data # A change should have registered because empty_pages_are_a_change is ON | ||||
|   | ||||
| @@ -6,7 +6,7 @@ from flask import url_for | ||||
| from loguru import logger | ||||
|  | ||||
| from .util import set_original_response, set_modified_response, set_more_modified_response, live_server_setup, wait_for_all_checks, \ | ||||
|     set_longer_modified_response | ||||
|     set_longer_modified_response, get_index | ||||
| from . util import  extract_UUID_from_client | ||||
| import logging | ||||
| import base64 | ||||
| @@ -29,7 +29,7 @@ def test_check_notification(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     # Re 360 - new install should have defaults set | ||||
|     res = client.get(url_for("settings_page")) | ||||
|     notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json') | ||||
|     notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json')+"?status_code=204" | ||||
|  | ||||
|     assert default_notification_body.encode() in res.data | ||||
|     assert default_notification_title.encode() in res.data | ||||
| @@ -76,7 +76,7 @@ def test_check_notification(client, live_server, measure_memory_usage): | ||||
|     testimage_png = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=' | ||||
|  | ||||
|  | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     datastore = 'test-datastore' | ||||
|     with open(os.path.join(datastore, str(uuid), 'last-screenshot.png'), 'wb') as f: | ||||
|         f.write(base64.b64decode(testimage_png)) | ||||
| @@ -135,7 +135,14 @@ def test_check_notification(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     # Trigger a check | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     wait_for_all_checks(client) | ||||
|     time.sleep(3) | ||||
|  | ||||
|     # Check no errors were recorded | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b'notification-error' not in res.data | ||||
|  | ||||
|  | ||||
|     # Verify what was sent as a notification, this file should exist | ||||
|     with open("test-datastore/notification.txt", "r") as f: | ||||
|         notification_submission = f.read() | ||||
| @@ -284,7 +291,7 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server, measure_me | ||||
|     # CUSTOM JSON BODY CHECK for POST:// | ||||
|     set_original_response() | ||||
|     # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#header-manipulation | ||||
|     test_notification_url = url_for('test_notification_endpoint', _external=True).replace('http://', 'post://')+"?xxx={{ watch_url }}&+custom-header=123" | ||||
|     test_notification_url = url_for('test_notification_endpoint', _external=True).replace('http://', 'post://')+"?status_code=204&xxx={{ watch_url }}&+custom-header=123&+second=hello+world%20%22space%22" | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("settings_page"), | ||||
| @@ -319,6 +326,11 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server, measure_me | ||||
|  | ||||
|     time.sleep(2) # plus extra delay for notifications to fire | ||||
|  | ||||
|  | ||||
|     # Check no errors were recorded, because we asked for 204 which is slightly uncommon but is still OK | ||||
|     res = get_index(client) | ||||
|     assert b'notification-error' not in res.data | ||||
|  | ||||
|     with open("test-datastore/notification.txt", 'r') as f: | ||||
|         x = f.read() | ||||
|         j = json.loads(x) | ||||
| @@ -326,6 +338,7 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server, measure_me | ||||
|         assert j['secret'] == 444 | ||||
|         assert j['somebug'] == '网站监测 内容更新了' | ||||
|  | ||||
|  | ||||
|     # URL check, this will always be converted to lowercase | ||||
|     assert os.path.isfile("test-datastore/notification-url.txt") | ||||
|     with open("test-datastore/notification-url.txt", 'r') as f: | ||||
| @@ -337,6 +350,7 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server, measure_me | ||||
|     with open("test-datastore/notification-headers.txt", 'r') as f: | ||||
|         notification_headers = f.read() | ||||
|         assert 'custom-header: 123' in notification_headers.lower() | ||||
|         assert 'second: hello world "space"' in notification_headers.lower() | ||||
|  | ||||
|  | ||||
|     # Should always be automatically detected as JSON content type even when we set it as 'Text' (default) | ||||
| @@ -358,7 +372,10 @@ def test_global_send_test_notification(client, live_server, measure_memory_usage | ||||
|     #live_server_setup(live_server) | ||||
|     set_original_response() | ||||
|     if os.path.isfile("test-datastore/notification.txt"): | ||||
|         os.unlink("test-datastore/notification.txt") | ||||
|         os.unlink("test-datastore/notification.txt") \ | ||||
|  | ||||
|     # 1995 UTF-8 content should be encoded | ||||
|     test_body = 'change detection is cool 网站监测 内容更新了' | ||||
|  | ||||
|     # otherwise other settings would have already existed from previous tests in this file | ||||
|     res = client.post( | ||||
| @@ -366,8 +383,7 @@ def test_global_send_test_notification(client, live_server, measure_memory_usage | ||||
|         data={ | ||||
|             "application-fetch_backend": "html_requests", | ||||
|             "application-minutes_between_check": 180, | ||||
|             #1995 UTF-8 content should be encoded | ||||
|             "application-notification_body": 'change detection is cool 网站监测 内容更新了', | ||||
|             "application-notification_body": test_body, | ||||
|             "application-notification_format": default_notification_format, | ||||
|             "application-notification_urls": "", | ||||
|             "application-notification_title": "New ChangeDetection.io Notification - {{ watch_url }}", | ||||
| @@ -397,12 +413,10 @@ def test_global_send_test_notification(client, live_server, measure_memory_usage | ||||
|     assert res.status_code != 400 | ||||
|     assert res.status_code != 500 | ||||
|  | ||||
|     # Give apprise time to fire | ||||
|     time.sleep(4) | ||||
|  | ||||
|     with open("test-datastore/notification.txt", 'r') as f: | ||||
|         x = f.read() | ||||
|         assert 'change detection is cool 网站监测 内容更新了' in x | ||||
|         assert test_body in x | ||||
|  | ||||
|     os.unlink("test-datastore/notification.txt") | ||||
|  | ||||
| @@ -429,3 +443,78 @@ def test_global_send_test_notification(client, live_server, measure_memory_usage | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     ######### Test global/system settings - When everything is deleted it should give a helpful error | ||||
|     # See #2727 | ||||
|     res = client.post( | ||||
|         url_for("ajax_callback_send_notification_test")+"?mode=global-settings", | ||||
|         data={"notification_urls": test_notification_url}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert res.status_code == 400 | ||||
|     assert b"Error: You must have atleast one watch configured for 'test notification' to work" in res.data | ||||
|  | ||||
|  | ||||
| def _test_color_notifications(client, notification_body_token): | ||||
|  | ||||
|     from changedetectionio.diff import ADDED_STYLE, REMOVED_STYLE | ||||
|  | ||||
|     set_original_response() | ||||
|  | ||||
|     if os.path.isfile("test-datastore/notification.txt"): | ||||
|         os.unlink("test-datastore/notification.txt") | ||||
|  | ||||
|  | ||||
|     test_notification_url = url_for('test_notification_endpoint', _external=True).replace('http://', 'post://')+"?xxx={{ watch_url }}&+custom-header=123" | ||||
|  | ||||
|  | ||||
|     # otherwise other settings would have already existed from previous tests in this file | ||||
|     res = client.post( | ||||
|         url_for("settings_page"), | ||||
|         data={ | ||||
|             "application-fetch_backend": "html_requests", | ||||
|             "application-minutes_between_check": 180, | ||||
|             "application-notification_body": notification_body_token, | ||||
|             "application-notification_format": "HTML Color", | ||||
|             "application-notification_urls": test_notification_url, | ||||
|             "application-notification_title": "New ChangeDetection.io Notification - {{ watch_url }}", | ||||
|         }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b'Settings updated' in res.data | ||||
|  | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
|     res = client.post( | ||||
|         url_for("form_quick_watch_add"), | ||||
|         data={"url": test_url, "tags": 'nice one'}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b"Watch added" in res.data | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     set_modified_response() | ||||
|  | ||||
|  | ||||
|     res = client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     assert b'1 watches queued for rechecking.' in res.data | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|     time.sleep(3) | ||||
|  | ||||
|     with open("test-datastore/notification.txt", 'r') as f: | ||||
|         x = f.read() | ||||
|         assert f'<span style="{REMOVED_STYLE}">Which is across multiple lines' in x | ||||
|  | ||||
|  | ||||
|     client.get( | ||||
|         url_for("form_delete", uuid="all"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
| def test_html_color_notifications(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     #live_server_setup(live_server) | ||||
|     _test_color_notifications(client, '{{diff}}') | ||||
|     _test_color_notifications(client, '{{diff_full}}') | ||||
|      | ||||
							
								
								
									
										72
									
								
								changedetectionio/tests/test_preview_endpoints.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										72
									
								
								changedetectionio/tests/test_preview_endpoints.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,72 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks | ||||
|  | ||||
|  | ||||
| # `subtractive_selectors` should still work in `source:` type requests | ||||
| def test_fetch_pdf(client, live_server, measure_memory_usage): | ||||
|     import shutil | ||||
|     shutil.copy("tests/test.pdf", "test-datastore/endpoint-test.pdf") | ||||
|  | ||||
|     live_server_setup(live_server) | ||||
|     test_url = url_for('test_pdf_endpoint', _external=True) | ||||
|     # Add our URL to the import page | ||||
|     res = client.post( | ||||
|         url_for("import_page"), | ||||
|         data={"urls": test_url}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b"1 Imported" in res.data | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     res = client.get( | ||||
|         url_for("preview_page", uuid="first"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     # PDF header should not be there (it was converted to text) | ||||
|     assert b'PDF' not in res.data[:10] | ||||
|     assert b'hello world' in res.data | ||||
|  | ||||
|     # So we know if the file changes in other ways | ||||
|     import hashlib | ||||
|     original_md5 = hashlib.md5(open("test-datastore/endpoint-test.pdf", 'rb').read()).hexdigest().upper() | ||||
|     # We should have one | ||||
|     assert len(original_md5) > 0 | ||||
|     # And it's going to be in the document | ||||
|     assert b'Document checksum - ' + bytes(str(original_md5).encode('utf-8')) in res.data | ||||
|  | ||||
|     shutil.copy("tests/test2.pdf", "test-datastore/endpoint-test.pdf") | ||||
|     changed_md5 = hashlib.md5(open("test-datastore/endpoint-test.pdf", 'rb').read()).hexdigest().upper() | ||||
|     res = client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     assert b'1 watches queued for rechecking.' in res.data | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # Now something should be ready, indicated by having a 'unviewed' class | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b'unviewed' in res.data | ||||
|  | ||||
|     # The original checksum should be not be here anymore (cdio adds it to the bottom of the text) | ||||
|  | ||||
|     res = client.get( | ||||
|         url_for("preview_page", uuid="first"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert original_md5.encode('utf-8') not in res.data | ||||
|     assert changed_md5.encode('utf-8') in res.data | ||||
|  | ||||
|     res = client.get( | ||||
|         url_for("diff_history_page", uuid="first"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert original_md5.encode('utf-8') in res.data | ||||
|     assert changed_md5.encode('utf-8') in res.data | ||||
|  | ||||
|     assert b'here is a change' in res.data | ||||
| @@ -45,7 +45,7 @@ def test_headers_in_request(client, live_server, measure_memory_usage): | ||||
|               "url": test_url, | ||||
|               "tags": "", | ||||
|               "fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests', | ||||
|               "headers": "xxx:ooo\ncool:yeah\r\ncookie:"+cookie_header}, | ||||
|               "headers": "jinja2:{{ 1+1 }}\nxxx:ooo\ncool:yeah\r\ncookie:"+cookie_header}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
| @@ -61,6 +61,7 @@ def test_headers_in_request(client, live_server, measure_memory_usage): | ||||
|     ) | ||||
|  | ||||
|     # Flask will convert the header key to uppercase | ||||
|     assert b"Jinja2:2" in res.data | ||||
|     assert b"Xxx:ooo" in res.data | ||||
|     assert b"Cool:yeah" in res.data | ||||
|  | ||||
| @@ -117,7 +118,8 @@ def test_body_in_request(client, live_server, measure_memory_usage): | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # Now the change which should trigger a change | ||||
|     body_value = 'Test Body Value' | ||||
|     body_value = 'Test Body Value {{ 1+1 }}' | ||||
|     body_value_formatted = 'Test Body Value 2' | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid="first"), | ||||
|         data={ | ||||
| @@ -140,8 +142,9 @@ def test_body_in_request(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     # If this gets stuck something is wrong, something should always be there | ||||
|     assert b"No history found" not in res.data | ||||
|     # We should see what we sent in the reply | ||||
|     assert str.encode(body_value) in res.data | ||||
|     # We should see the formatted value of what we sent in the reply | ||||
|     assert str.encode(body_value) not in res.data | ||||
|     assert str.encode(body_value_formatted) in res.data | ||||
|  | ||||
|     ####### data sanity checks | ||||
|     # Add the test URL twice, we will check | ||||
| @@ -370,13 +373,14 @@ def test_headers_textfile_in_request(client, live_server, measure_memory_usage): | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     with open('test-datastore/headers-testtag.txt', 'w') as f: | ||||
|         f.write("tag-header: test") | ||||
|         f.write("tag-header: test\r\nurl-header: http://example.com") | ||||
|  | ||||
|     with open('test-datastore/headers.txt', 'w') as f: | ||||
|         f.write("global-header: nice\r\nnext-global-header: nice") | ||||
|         f.write("global-header: nice\r\nnext-global-header: nice\r\nurl-header-global: http://example.com/global") | ||||
|  | ||||
|     with open('test-datastore/' + extract_UUID_from_client(client) + '/headers.txt', 'w') as f: | ||||
|         f.write("watch-header: nice") | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     with open(f'test-datastore/{uuid}/headers.txt', 'w') as f: | ||||
|         f.write("watch-header: nice\r\nurl-header-watch: http://example.com/watch") | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
| @@ -407,6 +411,9 @@ def test_headers_textfile_in_request(client, live_server, measure_memory_usage): | ||||
|     assert b"Xxx:ooo" in res.data | ||||
|     assert b"Watch-Header:nice" in res.data | ||||
|     assert b"Tag-Header:test" in res.data | ||||
|     assert b"Url-Header:http://example.com" in res.data | ||||
|     assert b"Url-Header-Global:http://example.com/global" in res.data | ||||
|     assert b"Url-Header-Watch:http://example.com/watch" in res.data | ||||
|  | ||||
|     # Check the custom UA from system settings page made it through | ||||
|     if os.getenv('PLAYWRIGHT_DRIVER_URL'): | ||||
|   | ||||
| @@ -3,7 +3,7 @@ import os | ||||
| import time | ||||
|  | ||||
| from flask import url_for | ||||
| from .util import live_server_setup, wait_for_all_checks, extract_UUID_from_client, wait_for_notification_endpoint_output | ||||
| from .util import live_server_setup, wait_for_all_checks, wait_for_notification_endpoint_output, extract_UUID_from_client | ||||
| from ..notification import default_notification_format | ||||
|  | ||||
| instock_props = [ | ||||
| @@ -189,6 +189,17 @@ def _run_test_minmax_limit(client, extra_watch_edit_form): | ||||
|  | ||||
|     client.get(url_for("mark_all_viewed")) | ||||
|  | ||||
|  | ||||
|     # 2715 - Price detection (once it crosses the "lower" threshold) again with a lower price - should trigger again! | ||||
|     set_original_response(props_markup=instock_props[0], price='820.45') | ||||
|     res = client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     assert b'1 watches queued for rechecking.' in res.data | ||||
|     wait_for_all_checks(client) | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b'820.45' in res.data | ||||
|     assert b'unviewed' in res.data | ||||
|     client.get(url_for("mark_all_viewed")) | ||||
|  | ||||
|     # price changed to something MORE than max (1100.10), SHOULD be a change | ||||
|     set_original_response(props_markup=instock_props[0], price='1890.45') | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
| @@ -203,7 +214,7 @@ def _run_test_minmax_limit(client, extra_watch_edit_form): | ||||
|  | ||||
|  | ||||
| def test_restock_itemprop_minmax(client, live_server): | ||||
| #    live_server_setup(live_server) | ||||
|     #live_server_setup(live_server) | ||||
|     extras = { | ||||
|         "restock_settings-follow_price_changes": "y", | ||||
|         "restock_settings-price_change_min": 900.0, | ||||
| @@ -367,6 +378,12 @@ def test_change_with_notification_values(client, live_server): | ||||
|         assert "new price 1950.45" in notification | ||||
|         assert "title new price 1950.45" in notification | ||||
|  | ||||
|     ## Now test the "SEND TEST NOTIFICATION" is working | ||||
|     os.unlink("test-datastore/notification.txt") | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     res = client.post(url_for("ajax_callback_send_notification_test", watch_uuid=uuid), data={}, follow_redirects=True) | ||||
|     time.sleep(5) | ||||
|     assert os.path.isfile("test-datastore/notification.txt"), "Notification received" | ||||
|  | ||||
|  | ||||
| def test_data_sanity(client, live_server): | ||||
| @@ -413,3 +430,31 @@ def test_data_sanity(client, live_server): | ||||
|     res = client.get( | ||||
|         url_for("edit_page", uuid="first")) | ||||
|     assert test_url2.encode('utf-8') in res.data | ||||
|  | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
| # All examples should give a prive of 666.66 | ||||
| def test_special_prop_examples(client, live_server): | ||||
|     import glob | ||||
|     #live_server_setup(live_server) | ||||
|  | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
|     check_path = os.path.join(os.path.dirname(__file__), "itemprop_test_examples", "*.txt") | ||||
|     files = glob.glob(check_path) | ||||
|     assert files | ||||
|     for test_example_filename in files: | ||||
|         with open(test_example_filename, 'r') as example_f: | ||||
|             with open("test-datastore/endpoint-content.txt", "w") as test_f: | ||||
|                 test_f.write(f"<html><body>{example_f.read()}</body></html>") | ||||
|  | ||||
|             # Now fetch it and check the price worked | ||||
|             client.post( | ||||
|                 url_for("form_quick_watch_add"), | ||||
|                 data={"url": test_url, "tags": 'restock tests', 'processor': 'restock_diff'}, | ||||
|                 follow_redirects=True | ||||
|             ) | ||||
|             wait_for_all_checks(client) | ||||
|             res = client.get(url_for("index")) | ||||
|             assert b'ception' not in res.data | ||||
|             assert b'155.55' in res.data | ||||
|   | ||||
| @@ -132,7 +132,7 @@ def test_rss_xpath_filtering(client, live_server, measure_memory_usage): | ||||
|     ) | ||||
|     assert b"Watch added in Paused state, saving will unpause" in res.data | ||||
|  | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid=uuid, unpause_on_save=1), | ||||
|         data={ | ||||
|   | ||||
							
								
								
									
										179
									
								
								changedetectionio/tests/test_scheduler.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										179
									
								
								changedetectionio/tests/test_scheduler.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,179 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from datetime import datetime, timezone | ||||
| from zoneinfo import ZoneInfo | ||||
| from flask import url_for | ||||
| from .util import  live_server_setup, wait_for_all_checks, extract_UUID_from_client | ||||
|  | ||||
| def test_setup(client, live_server): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
| def test_check_basic_scheduler_functionality(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|     days = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday'] | ||||
|     test_url = url_for('test_random_content_endpoint', _external=True) | ||||
|  | ||||
|     # We use "Pacific/Kiritimati" because its the furthest +14 hours, so it might show up more interesting bugs | ||||
|     # The rest of the actual functionality should be covered in the unit-test  unit/test_scheduler.py | ||||
|     ##################### | ||||
|     res = client.post( | ||||
|         url_for("settings_page"), | ||||
|         data={"application-empty_pages_are_a_change": "", | ||||
|               "requests-time_between_check-seconds": 1, | ||||
|               "application-timezone": "Pacific/Kiritimati",  # Most Forward Time Zone (UTC+14:00) | ||||
|               'application-fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b"Settings updated." in res.data | ||||
|  | ||||
|     res = client.get(url_for("settings_page")) | ||||
|     assert b'Pacific/Kiritimati' in res.data | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("import_page"), | ||||
|         data={"urls": test_url}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b"1 Imported" in res.data | ||||
|     wait_for_all_checks(client) | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|  | ||||
|     # Setup all the days of the weeks using XXX as the placeholder for monday/tuesday/etc | ||||
|  | ||||
|     tpl = { | ||||
|         "time_schedule_limit-XXX-start_time": "00:00", | ||||
|         "time_schedule_limit-XXX-duration-hours": 24, | ||||
|         "time_schedule_limit-XXX-duration-minutes": 0, | ||||
|         "time_schedule_limit-XXX-enabled": '',  # All days are turned off | ||||
|         "time_schedule_limit-enabled": 'y',  # Scheduler is enabled, all days however are off. | ||||
|     } | ||||
|  | ||||
|     scheduler_data = {} | ||||
|     for day in days: | ||||
|         for key, value in tpl.items(): | ||||
|             # Replace "XXX" with the current day in the key | ||||
|             new_key = key.replace("XXX", day) | ||||
|             scheduler_data[new_key] = value | ||||
|  | ||||
|     last_check = live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] | ||||
|     data = { | ||||
|         "url": test_url, | ||||
|         "fetch_backend": "html_requests" | ||||
|     } | ||||
|     data.update(scheduler_data) | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid="first"), | ||||
|         data=data, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|  | ||||
|     res = client.get(url_for("edit_page", uuid="first")) | ||||
|     assert b"Pacific/Kiritimati" in res.data, "Should be Pacific/Kiritimati in placeholder data" | ||||
|  | ||||
|     # "Edit" should not trigger a check because it's not enabled in the schedule. | ||||
|     time.sleep(2) | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] == last_check | ||||
|  | ||||
|     # Enabling today in Kiritimati should work flawless | ||||
|     kiritimati_time = datetime.now(timezone.utc).astimezone(ZoneInfo("Pacific/Kiritimati")) | ||||
|     kiritimati_time_day_of_week = kiritimati_time.strftime("%A").lower() | ||||
|     live_server.app.config['DATASTORE'].data['watching'][uuid]["time_schedule_limit"][kiritimati_time_day_of_week]["enabled"] = True | ||||
|     time.sleep(3) | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] != last_check | ||||
|  | ||||
|     # Cleanup everything | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
|  | ||||
| def test_check_basic_global_scheduler_functionality(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|     days = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday'] | ||||
|     test_url = url_for('test_random_content_endpoint', _external=True) | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("import_page"), | ||||
|         data={"urls": test_url}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b"1 Imported" in res.data | ||||
|     wait_for_all_checks(client) | ||||
|     uuid = next(iter(live_server.app.config['DATASTORE'].data['watching'])) | ||||
|  | ||||
|     # Setup all the days of the weeks using XXX as the placeholder for monday/tuesday/etc | ||||
|  | ||||
|     tpl = { | ||||
|         "requests-time_schedule_limit-XXX-start_time": "00:00", | ||||
|         "requests-time_schedule_limit-XXX-duration-hours": 24, | ||||
|         "requests-time_schedule_limit-XXX-duration-minutes": 0, | ||||
|         "requests-time_schedule_limit-XXX-enabled": '',  # All days are turned off | ||||
|         "requests-time_schedule_limit-enabled": 'y',  # Scheduler is enabled, all days however are off. | ||||
|     } | ||||
|  | ||||
|     scheduler_data = {} | ||||
|     for day in days: | ||||
|         for key, value in tpl.items(): | ||||
|             # Replace "XXX" with the current day in the key | ||||
|             new_key = key.replace("XXX", day) | ||||
|             scheduler_data[new_key] = value | ||||
|  | ||||
|     data = { | ||||
|         "application-empty_pages_are_a_change": "", | ||||
|         "application-timezone": "Pacific/Kiritimati",  # Most Forward Time Zone (UTC+14:00) | ||||
|         'application-fetch_backend': "html_requests", | ||||
|         "requests-time_between_check-hours": 0, | ||||
|         "requests-time_between_check-minutes": 0, | ||||
|         "requests-time_between_check-seconds": 1, | ||||
|     } | ||||
|     data.update(scheduler_data) | ||||
|  | ||||
|     ##################### | ||||
|     res = client.post( | ||||
|         url_for("settings_page"), | ||||
|         data=data, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b"Settings updated." in res.data | ||||
|  | ||||
|     res = client.get(url_for("settings_page")) | ||||
|     assert b'Pacific/Kiritimati' in res.data | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # UI Sanity check | ||||
|  | ||||
|     res = client.get(url_for("edit_page", uuid="first")) | ||||
|     assert b"Pacific/Kiritimati" in res.data, "Should be Pacific/Kiritimati in placeholder data" | ||||
|  | ||||
|     #### HITTING SAVE SHOULD NOT TRIGGER A CHECK | ||||
|     last_check = live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid="first"), | ||||
|         data={ | ||||
|             "url": test_url, | ||||
|             "fetch_backend": "html_requests", | ||||
|             "time_between_check_use_default": "y"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|     time.sleep(2) | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] == last_check | ||||
|  | ||||
|     # Enabling "today" in Kiritimati time should make the system check that watch | ||||
|     kiritimati_time = datetime.now(timezone.utc).astimezone(ZoneInfo("Pacific/Kiritimati")) | ||||
|     kiritimati_time_day_of_week = kiritimati_time.strftime("%A").lower() | ||||
|     live_server.app.config['DATASTORE'].data['settings']['requests']['time_schedule_limit'][kiritimati_time_day_of_week]["enabled"] = True | ||||
|  | ||||
|     time.sleep(3) | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] != last_check | ||||
|  | ||||
|     # Cleanup everything | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
| @@ -1,9 +1,7 @@ | ||||
| import os | ||||
|  | ||||
| from flask import url_for | ||||
| from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks | ||||
| import time | ||||
|  | ||||
| from .util import live_server_setup, wait_for_all_checks | ||||
| from .. import strtobool | ||||
|  | ||||
|  | ||||
| @@ -61,32 +59,44 @@ def test_bad_access(client, live_server, measure_memory_usage): | ||||
|     assert b'Watch protocol is not permitted by SAFE_PROTOCOL_REGEX' in res.data | ||||
|  | ||||
|  | ||||
| def test_file_access(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
| def _runner_test_various_file_slash(client, file_uri): | ||||
|  | ||||
|     test_file_path = "/tmp/test-file.txt" | ||||
|  | ||||
|     # file:// is permitted by default, but it will be caught by ALLOW_FILE_URI | ||||
|     client.post( | ||||
|         url_for("form_quick_watch_add"), | ||||
|         data={"url": f"file://{test_file_path}", "tags": ''}, | ||||
|         data={"url": file_uri, "tags": ''}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     wait_for_all_checks(client) | ||||
|     res = client.get(url_for("index")) | ||||
|  | ||||
|     substrings = [b"URLs with hostname components are not permitted", b"No connection adapters were found for"] | ||||
|  | ||||
|  | ||||
|     # If it is enabled at test time | ||||
|     if strtobool(os.getenv('ALLOW_FILE_URI', 'false')): | ||||
|         res = client.get( | ||||
|             url_for("preview_page", uuid="first"), | ||||
|             follow_redirects=True | ||||
|         ) | ||||
|         if file_uri.startswith('file:///'): | ||||
|             # This one should be the full qualified path to the file and should get the contents of this file | ||||
|             res = client.get( | ||||
|                 url_for("preview_page", uuid="first"), | ||||
|                 follow_redirects=True | ||||
|             ) | ||||
|             assert b'_runner_test_various_file_slash' in res.data | ||||
|         else: | ||||
|             # This will give some error from requests or if it went to chrome, will give some other error :-) | ||||
|             assert any(s in res.data for s in substrings) | ||||
|  | ||||
|         # Should see something (this file added by run_basic_tests.sh) | ||||
|         assert b"Hello world" in res.data | ||||
|     else: | ||||
|         # Default should be here | ||||
|         assert b'file:// type access is denied for security reasons.' in res.data | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
| def test_file_slash_access(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|  | ||||
|     # file: is NOT permitted by default, so it will be caught by ALLOW_FILE_URI check | ||||
|  | ||||
|     test_file_path = os.path.abspath(__file__) | ||||
|     _runner_test_various_file_slash(client, file_uri=f"file://{test_file_path}") | ||||
|     _runner_test_various_file_slash(client, file_uri=f"file:/{test_file_path}") | ||||
|     _runner_test_various_file_slash(client, file_uri=f"file:{test_file_path}") # CVE-2024-56509 | ||||
|  | ||||
| def test_xss(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|   | ||||
							
								
								
									
										53
									
								
								changedetectionio/tests/unit/test_scheduler.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										53
									
								
								changedetectionio/tests/unit/test_scheduler.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,53 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| # run from dir above changedetectionio/ dir | ||||
| # python3 -m unittest changedetectionio.tests.unit.test_jinja2_security | ||||
|  | ||||
| import unittest | ||||
| from datetime import datetime, timedelta | ||||
| from zoneinfo import ZoneInfo | ||||
|  | ||||
| class TestScheduler(unittest.TestCase): | ||||
|  | ||||
|     # UTC+14:00 (Line Islands, Kiribati) is the farthest ahead, always ahead of UTC. | ||||
|     # UTC-12:00 (Baker Island, Howland Island) is the farthest behind, always one calendar day behind UTC. | ||||
|  | ||||
|     def test_timezone_basic_time_within_schedule(self): | ||||
|         from changedetectionio import time_handler | ||||
|  | ||||
|         timezone_str = 'Europe/Berlin' | ||||
|         debug_datetime = datetime.now(ZoneInfo(timezone_str)) | ||||
|         day_of_week = debug_datetime.strftime('%A') | ||||
|         time_str = str(debug_datetime.hour)+':00' | ||||
|         duration = 60  # minutes | ||||
|  | ||||
|         # The current time should always be within 60 minutes of [time_hour]:00 | ||||
|         result = time_handler.am_i_inside_time(day_of_week=day_of_week, | ||||
|                                                time_str=time_str, | ||||
|                                                timezone_str=timezone_str, | ||||
|                                                duration=duration) | ||||
|  | ||||
|         self.assertEqual(result, True, f"{debug_datetime} is within time scheduler {day_of_week} {time_str} in {timezone_str} for {duration} minutes") | ||||
|  | ||||
|     def test_timezone_basic_time_outside_schedule(self): | ||||
|         from changedetectionio import time_handler | ||||
|  | ||||
|         timezone_str = 'Europe/Berlin' | ||||
|         # We try a date in the future.. | ||||
|         debug_datetime = datetime.now(ZoneInfo(timezone_str))+ timedelta(days=-1) | ||||
|         day_of_week = debug_datetime.strftime('%A') | ||||
|         time_str = str(debug_datetime.hour) + ':00' | ||||
|         duration = 60*24  # minutes | ||||
|  | ||||
|         # The current time should always be within 60 minutes of [time_hour]:00 | ||||
|         result = time_handler.am_i_inside_time(day_of_week=day_of_week, | ||||
|                                                time_str=time_str, | ||||
|                                                timezone_str=timezone_str, | ||||
|                                                duration=duration) | ||||
|  | ||||
|         self.assertNotEqual(result, True, | ||||
|                          f"{debug_datetime} is NOT within time scheduler {day_of_week} {time_str} in {timezone_str} for {duration} minutes") | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
							
								
								
									
										64
									
								
								changedetectionio/tests/unit/test_semver.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										64
									
								
								changedetectionio/tests/unit/test_semver.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,64 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| # run from dir above changedetectionio/ dir | ||||
| # python3 -m unittest changedetectionio.tests.unit.test_semver | ||||
|  | ||||
| import re | ||||
| import unittest | ||||
|  | ||||
|  | ||||
| # The SEMVER regex | ||||
| SEMVER_REGEX = r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$" | ||||
|  | ||||
| # Compile the regex | ||||
| semver_pattern = re.compile(SEMVER_REGEX) | ||||
|  | ||||
| class TestSemver(unittest.TestCase): | ||||
|     def test_valid_versions(self): | ||||
|         """Test valid semantic version strings""" | ||||
|         valid_versions = [ | ||||
|             "1.0.0", | ||||
|             "0.1.0", | ||||
|             "0.0.1", | ||||
|             "1.0.0-alpha", | ||||
|             "1.0.0-alpha.1", | ||||
|             "1.0.0-0.3.7", | ||||
|             "1.0.0-x.7.z.92", | ||||
|             "1.0.0-alpha+001", | ||||
|             "1.0.0+20130313144700", | ||||
|             "1.0.0-beta+exp.sha.5114f85" | ||||
|         ] | ||||
|         for version in valid_versions: | ||||
|             with self.subTest(version=version): | ||||
|                 self.assertIsNotNone(semver_pattern.match(version), f"Version {version} should be valid") | ||||
|  | ||||
|     def test_invalid_versions(self): | ||||
|         """Test invalid semantic version strings""" | ||||
|         invalid_versions = [ | ||||
|             "0.48.06", | ||||
|             "1.0", | ||||
|             "1.0.0-", | ||||
| # Seems to pass the semver.org regex? | ||||
| #            "1.0.0-alpha-", | ||||
|             "1.0.0+", | ||||
|             "1.0.0-alpha+", | ||||
|             "1.0.0-", | ||||
|             "01.0.0", | ||||
|             "1.01.0", | ||||
|             "1.0.01", | ||||
|             ".1.0.0", | ||||
|             "1..0.0" | ||||
|         ] | ||||
|         for version in invalid_versions: | ||||
|             with self.subTest(version=version): | ||||
|                 res = semver_pattern.match(version) | ||||
|                 self.assertIsNone(res, f"Version '{version}' should be invalid") | ||||
|  | ||||
|     def test_our_version(self): | ||||
|         from changedetectionio import get_version | ||||
|         our_version = get_version() | ||||
|         self.assertIsNotNone(semver_pattern.match(our_version), f"Our version '{our_version}' should be a valid SEMVER string") | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -16,39 +16,50 @@ class TestDiffBuilder(unittest.TestCase): | ||||
|         watch = Watch.model(datastore_path='/tmp', default={}) | ||||
|         watch.ensure_data_dir_exists() | ||||
|  | ||||
|  | ||||
|         # Contents from the browser are always returned from the browser/requests/etc as str, str is basically UTF-16 in python | ||||
|         watch.save_history_text(contents="hello world", timestamp=100, snapshot_id=str(uuid_builder.uuid4())) | ||||
|         watch.save_history_text(contents="hello world", timestamp=105, snapshot_id=str(uuid_builder.uuid4())) | ||||
|         watch.save_history_text(contents="hello world", timestamp=109, snapshot_id=str(uuid_builder.uuid4())) | ||||
|         watch.save_history_text(contents="hello world", timestamp=112, snapshot_id=str(uuid_builder.uuid4())) | ||||
|         watch.save_history_text(contents="hello world", timestamp=115, snapshot_id=str(uuid_builder.uuid4())) | ||||
|         watch.save_history_text(contents="hello world", timestamp=117, snapshot_id=str(uuid_builder.uuid4())) | ||||
|      | ||||
|         p = watch.get_from_version_based_on_last_viewed | ||||
|         assert p == "100", "Correct 'last viewed' timestamp was detected" | ||||
|  | ||||
|         watch['last_viewed'] = 110 | ||||
|         p = watch.get_from_version_based_on_last_viewed | ||||
|         assert p == "109", "Correct 'last viewed' timestamp was detected" | ||||
|  | ||||
|         watch.save_history_text(contents=b"hello world", timestamp=100, snapshot_id=str(uuid_builder.uuid4())) | ||||
|         watch.save_history_text(contents=b"hello world", timestamp=105, snapshot_id=str(uuid_builder.uuid4())) | ||||
|         watch.save_history_text(contents=b"hello world", timestamp=109, snapshot_id=str(uuid_builder.uuid4())) | ||||
|         watch.save_history_text(contents=b"hello world", timestamp=112, snapshot_id=str(uuid_builder.uuid4())) | ||||
|         watch.save_history_text(contents=b"hello world", timestamp=115, snapshot_id=str(uuid_builder.uuid4())) | ||||
|         watch.save_history_text(contents=b"hello world", timestamp=117, snapshot_id=str(uuid_builder.uuid4())) | ||||
|  | ||||
|         p = watch.get_next_snapshot_key_to_last_viewed | ||||
|         assert p == "112", "Correct last-viewed timestamp was detected" | ||||
|  | ||||
|         # When there is only one step of difference from the end of the list, it should return second-last change | ||||
|         watch['last_viewed'] = 116 | ||||
|         p = watch.get_next_snapshot_key_to_last_viewed | ||||
|         assert p == "115", "Correct 'second last' last-viewed timestamp was detected when using the last timestamp" | ||||
|         p = watch.get_from_version_based_on_last_viewed | ||||
|         assert p == "115", "Correct 'last viewed' timestamp was detected" | ||||
|  | ||||
|         watch['last_viewed'] = 99 | ||||
|         p = watch.get_next_snapshot_key_to_last_viewed | ||||
|         assert p == "100" | ||||
|         p = watch.get_from_version_based_on_last_viewed | ||||
|         assert p == "100", "When the 'last viewed' timestamp is less than the oldest snapshot, return oldest" | ||||
|  | ||||
|         watch['last_viewed'] = 200 | ||||
|         p = watch.get_next_snapshot_key_to_last_viewed | ||||
|         assert p == "115", "When the 'last viewed' timestamp is greater than the newest snapshot, return second last " | ||||
|         p = watch.get_from_version_based_on_last_viewed | ||||
|         assert p == "115", "When the 'last viewed' timestamp is greater than the newest snapshot, return second newest" | ||||
|  | ||||
|         watch['last_viewed'] = 109 | ||||
|         p = watch.get_next_snapshot_key_to_last_viewed | ||||
|         p = watch.get_from_version_based_on_last_viewed | ||||
|         assert p == "109", "Correct when its the same time" | ||||
|  | ||||
|         # new empty one | ||||
|         watch = Watch.model(datastore_path='/tmp', default={}) | ||||
|         p = watch.get_next_snapshot_key_to_last_viewed | ||||
|         p = watch.get_from_version_based_on_last_viewed | ||||
|         assert p == None, "None when no history available" | ||||
|  | ||||
|         watch.save_history_text(contents="hello world", timestamp=100, snapshot_id=str(uuid_builder.uuid4())) | ||||
|         p = watch.get_from_version_based_on_last_viewed | ||||
|         assert p == "100", "Correct with only one history snapshot" | ||||
|  | ||||
|         watch['last_viewed'] = 200 | ||||
|         p = watch.get_from_version_based_on_last_viewed | ||||
|         assert p == "100", "Correct with only one history snapshot" | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|   | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user