Mirror of https://github.com/dgtlmoon/changedetection.io.git, synced 2025-10-31 06:37:41 +00:00

Compare commits

90 Commits

filter-fai ... ui-mobile-
| Author | SHA1 |
|---|---|
|  | 7c7f6ac182 |
|  | 5b34aece96 |
|  | 1b625dc18a |
|  | 367afc81e9 |
|  | ddfbef6db3 |
|  | e173954cdd |
|  | e830fb2320 |
|  | c6589ee1b4 |
|  | dc936a2e8a |
|  | 8c1527c1ad |
|  | a5ff1cd1d7 |
|  | 543cb205d2 |
|  | 273adfa0a4 |
|  | 8ecfd17973 |
|  | 19f3851c9d |
|  | 7f2fa20318 |
|  | e16814e40b |
|  | 337fcab3f1 |
|  | eaccd6026c |
|  | 5b70625eaa |
|  | 60d292107d |
|  | 1cb38347da |
|  | 55fe2abf42 |
|  | 4225900ec3 |
|  | 1fb4342488 |
|  | 7071df061a |
|  | 6dd1fa2b88 |
|  | 371f85d544 |
|  | 932cf15e1e |
|  | bf0d410d32 |
|  | 730f37c7ba |
|  | 8a35d62e02 |
|  | f527744024 |
|  | 71c9b1273c |
|  | ec68450df1 |
|  | 2fd762a783 |
|  | d7e85ffe8f |
|  | d23a301826 |
|  | 3ce6096fdb |
|  | 8acdcdd861 |
|  | 755cba33de |
|  | 8aae7dfae0 |
|  | ed00f67a80 |
|  | 44e7e142f8 |
|  | fe704e05a3 |
|  | e756e0af5e |
|  | c0b6c8581e |
|  | de558f208f |
|  | 321426dea2 |
|  | bde27c8a8f |
|  | 1405e962f0 |
|  | a9f10946f4 |
|  | 6f2186b442 |
|  | cf0ff26275 |
|  | cffb6d748c |
|  | 99b0935b42 |
|  | f1853b0ce7 |
|  | c331612a22 |
|  | 445bb0dde3 |
|  | 8f3a6a42bc |
|  | 732ae1d935 |
|  | 5437144dff |
|  | ed38012c6e |
|  | f07ff9b55e |
|  | 1c46914992 |
|  | e9c4037178 |
|  | 1af342ef64 |
|  | e09ee7da97 |
|  | 09bc24ff34 |
|  | a1d04bb37f |
|  | 01f910f840 |
|  | bed16009bb |
|  | faeed78ffb |
|  | 5d9081ccb2 |
|  | 2cf1829073 |
|  | 526551a205 |
|  | ba139e7f3f |
|  | 13e343f9da |
|  | 13be4623db |
|  | 3b19e3d2bf |
| dependabot[bot] | ce42f8ea26 |
|  | 343e359b39 |
|  | ffd160ce0e |
|  | d31fc860cc |
|  | 90b357f457 |
|  | cc147be76e |
| dependabot[bot] | 8ae5ed76ce |
|  | a9ed113369 |
|  | eacf920b9a |
|  | c9af9b6374 |

.github/workflows/containers.yml (vendored, 8 changed lines)

							| @@ -88,14 +88,14 @@ jobs: | ||||
|       - name: Build and push :dev | ||||
|         id: docker_build | ||||
|         if: ${{ github.ref }} == "refs/heads/master" | ||||
|         uses: docker/build-push-action@v5 | ||||
|         uses: docker/build-push-action@v6 | ||||
|         with: | ||||
|           context: ./ | ||||
|           file: ./Dockerfile | ||||
|           push: true | ||||
|           tags: | | ||||
|             ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:dev,ghcr.io/${{ github.repository }}:dev | ||||
|           platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8 | ||||
|           platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8 | ||||
|           cache-from: type=gha | ||||
|           cache-to: type=gha,mode=max | ||||
|  | ||||
| @@ -106,7 +106,7 @@ jobs: | ||||
|       - name: Build and push :tag | ||||
|         id: docker_build_tag_release | ||||
|         if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.') | ||||
|         uses: docker/build-push-action@v5 | ||||
|         uses: docker/build-push-action@v6 | ||||
|         with: | ||||
|           context: ./ | ||||
|           file: ./Dockerfile | ||||
| @@ -116,7 +116,7 @@ jobs: | ||||
|             ghcr.io/dgtlmoon/changedetection.io:${{ github.event.release.tag_name }} | ||||
|             ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest | ||||
|             ghcr.io/dgtlmoon/changedetection.io:latest | ||||
|           platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8 | ||||
|           platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8 | ||||
|           cache-from: type=gha | ||||
|           cache-to: type=gha,mode=max | ||||
| # Looks like this was disabled | ||||
|   | ||||

.github/workflows/test-container-build.yml (vendored, 6 changed lines)

							| @@ -51,7 +51,7 @@ jobs: | ||||
|         # Check we can still build under alpine/musl | ||||
|         - name: Test that the docker containers can build (musl via alpine check) | ||||
|           id: docker_build_musl | ||||
|           uses: docker/build-push-action@v5 | ||||
|           uses: docker/build-push-action@v6 | ||||
|           with: | ||||
|             context: ./ | ||||
|             file: ./.github/test/Dockerfile-alpine | ||||
| @@ -59,12 +59,12 @@ jobs: | ||||
|  | ||||
|         - name: Test that the docker containers can build | ||||
|           id: docker_build | ||||
|           uses: docker/build-push-action@v5 | ||||
|           uses: docker/build-push-action@v6 | ||||
|           # https://github.com/docker/build-push-action#customizing | ||||
|           with: | ||||
|             context: ./ | ||||
|             file: ./Dockerfile | ||||
|             platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8 | ||||
|             platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8 | ||||
|             cache-from: type=local,src=/tmp/.buildx-cache | ||||
|             cache-to: type=local,dest=/tmp/.buildx-cache | ||||
|  | ||||
|   | ||||
| @@ -93,7 +93,7 @@ jobs: | ||||
|       - name: Playwright and SocketPuppetBrowser - Headers and requests | ||||
|         run: |        | ||||
|           # Settings headers playwright tests - Call back in from Sockpuppetbrowser, check headers | ||||
|           docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000?dumpio=true" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0  --live-server-port=5004 tests/test_request.py' | ||||
|           docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000?dumpio=true" --network changedet-network test-changedetectionio  bash -c 'find .; cd changedetectionio; pytest --live-server-host=0.0.0.0  --live-server-port=5004 tests/test_request.py; pwd;find .' | ||||
|  | ||||
|       - name: Playwright and SocketPuppetBrowser - Restock detection | ||||
|         run: |                             | ||||
| @@ -231,9 +231,9 @@ jobs: | ||||
|           docker logs test-cdio-basic-tests > output-logs/test-cdio-basic-tests-stdout-${{ env.PYTHON_VERSION }}.txt | ||||
|           docker logs test-cdio-basic-tests 2> output-logs/test-cdio-basic-tests-stderr-${{ env.PYTHON_VERSION }}.txt | ||||
|  | ||||
|       - name: Store container log | ||||
|       - name: Store everything including test-datastore | ||||
|         if: always() | ||||
|         uses: actions/upload-artifact@v4 | ||||
|         with: | ||||
|           name: test-cdio-basic-tests-output-py${{ env.PYTHON_VERSION }} | ||||
|           path: output-logs | ||||
|           path: . | ||||
|   | ||||

COMMERCIAL_LICENCE.md (new file, 54 lines)

							| @@ -0,0 +1,54 @@ | ||||
| # Generally | ||||
|  | ||||
| In any commercial activity involving 'Hosting' (as defined herein), whether in part or in full, this license must be executed and adhered to. | ||||
|  | ||||
| # Commercial License Agreement | ||||
|  | ||||
| This Commercial License Agreement ("Agreement") is entered into by and between Mr Morresi (the original creator of this software) here-in ("Licensor") and (your company or personal name) _____________ ("Licensee"). This Agreement sets forth the terms and conditions under which Licensor provides its software ("Software") and services to Licensee for the purpose of reselling the software either in part or full, as part of any commercial activity where the activity involves a third party. | ||||
|  | ||||
| ### Definition of Hosting | ||||
|  | ||||
| For the purposes of this Agreement, "hosting" means making the functionality of the Program or modified version available to third parties as a service. This includes, without limitation: | ||||
| - Enabling third parties to interact with the functionality of the Program or modified version remotely through a computer network. | ||||
| - Offering a service the value of which entirely or primarily derives from the value of the Program or modified version. | ||||
| - Offering a service that accomplishes for users the primary purpose of the Program or modified version. | ||||
|  | ||||
| ## 1. Grant of License | ||||
| Subject to the terms and conditions of this Agreement, Licensor grants Licensee a non-exclusive, non-transferable license to install, use, and resell the Software. Licensee may: | ||||
| - Resell the Software as part of a service offering or as a standalone product. | ||||
| - Host the Software on a server and provide it as a hosted service (e.g., Software as a Service - SaaS). | ||||
| - Integrate the Software into a larger product or service that is then sold or provided for commercial purposes, where the software is used either in part or full. | ||||
|  | ||||
| ## 2. License Fees | ||||
| Licensee agrees to pay Licensor the license fees specified in the ordering document. License fees are due and payable as specified in the ordering document. The fees may include initial licensing costs and recurring fees based on the number of end users, instances of the Software resold, or revenue generated from the resale activities. | ||||
|  | ||||
| ## 3. Resale Conditions | ||||
| Licensee must comply with the following conditions when reselling the Software, whether the software is resold in part or full: | ||||
| - Provide end users with access to the source code under the same open-source license conditions as provided by Licensor. | ||||
| - Clearly state in all marketing and sales materials that the Software is provided under a commercial license from Licensor, and provide a link back to https://changedetection.io. | ||||
| - Ensure end users are aware of and agree to the terms of the commercial license prior to resale. | ||||
| - Do not sublicense or transfer the Software to third parties except as part of an authorized resale activity. | ||||
|  | ||||
| ## 4. Hosting and Provision of Services | ||||
| Licensee may host the Software (either in part or full) on its servers and provide it as a hosted service to end users. The following conditions apply: | ||||
| - Licensee must ensure that all hosted versions of the Software comply with the terms of this Agreement. | ||||
| - Licensee must provide Licensor with regular reports detailing the number of end users and instances of the hosted service. | ||||
| - Any modifications to the Software made by Licensee for hosting purposes must be made available to end users under the same open-source license conditions, unless agreed otherwise. | ||||
|  | ||||
| ## 5. Services | ||||
| Licensor will provide support and maintenance services as described in the support policy referenced in the ordering document should such an agreement be signed by all parties. Additional fees may apply for support services provided to end users resold by Licensee. | ||||
|  | ||||
| ## 6. Reporting and Audits | ||||
| Licensee agrees to provide Licensor with regular reports detailing the number of instances, end users, and revenue generated from the resale of the Software. Licensor reserves the right to audit Licensee’s records to ensure compliance with this Agreement. | ||||
|  | ||||
| ## 7. Term and Termination | ||||
| This Agreement shall commence on the effective date and continue for the period set forth in the ordering document unless terminated earlier in accordance with this Agreement. Either party may terminate this Agreement if the other party breaches any material term and fails to cure such breach within thirty (30) days after receipt of written notice. | ||||
|  | ||||
| ## 8. Limitation of Liability and Disclaimer of Warranty | ||||
| Executing this commercial license does not waive the Limitation of Liability or Disclaimer of Warranty as stated in the open-source LICENSE provided with the Software. The Software is provided "as is," without warranty of any kind, express or implied, including but not limited to the warranties of merchantability, fitness for a particular purpose, and noninfringement. In no event shall the authors or copyright holders be liable for any claim, damages, or other liability, whether in an action of contract, tort, or otherwise, arising from, out of, or in connection with the Software or the use or other dealings in the Software. | ||||
|  | ||||
| ## 9. Governing Law | ||||
| This Agreement shall be governed by and construed in accordance with the laws of the Czech Republic. | ||||
|  | ||||
| ## Contact Information | ||||
| For commercial licensing inquiries, please contact contact@changedetection.io and dgtlmoon@gmail.com. | ||||
| @@ -3,9 +3,9 @@ | ||||
| # @NOTE! I would love to move to 3.11 but it breaks the async handler in changedetectionio/content_fetchers/puppeteer.py | ||||
| #        If you know how to fix it, please do! and test it for both 3.10 and 3.11 | ||||
|  | ||||
| ARG PYTHON_VERSION=3.10 | ||||
| ARG PYTHON_VERSION=3.11 | ||||
|  | ||||
| FROM python:${PYTHON_VERSION}-slim-bookworm as builder | ||||
| FROM python:${PYTHON_VERSION}-slim-bookworm AS builder | ||||
|  | ||||
| # See `cryptography` pin comment in requirements.txt | ||||
| ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1 | ||||
| @@ -26,7 +26,8 @@ WORKDIR /install | ||||
|  | ||||
| COPY requirements.txt /requirements.txt | ||||
|  | ||||
| RUN pip install --target=/dependencies -r /requirements.txt | ||||
| # --extra-index-url https://www.piwheels.org/simple  is for cryptography module to be prebuilt (or rustc etc needs to be installed) | ||||
| RUN pip install --extra-index-url https://www.piwheels.org/simple  --target=/dependencies -r /requirements.txt | ||||
|  | ||||
| # Playwright is an alternative to Selenium | ||||
| # Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing | ||||
| @@ -39,6 +40,8 @@ FROM python:${PYTHON_VERSION}-slim-bookworm | ||||
|  | ||||
| RUN apt-get update && apt-get install -y --no-install-recommends \ | ||||
|     libxslt1.1 \ | ||||
|     # For presenting price amounts correctly in the restock/price detection overview | ||||
|     locales \ | ||||
|     # For pdftohtml | ||||
|     poppler-utils \ | ||||
|     zlib1g \ | ||||
|   | ||||

README.md (18 changed lines)

							| @@ -41,6 +41,20 @@ Using the **Browser Steps** configuration, add basic steps before performing cha | ||||
| After **Browser Steps** have been run, then visit the **Visual Selector** tab to refine the content you're interested in. | ||||
| Requires Playwright to be enabled. | ||||
|  | ||||
| ### Awesome restock and price change notifications | ||||
|  | ||||
| Enable the _"Re-stock & Price detection for single product pages"_ option to activate the best way to monitor product pricing, this will extract any meta-data in the HTML page and give you many options to follow the pricing of the product. | ||||
|  | ||||
| Easily organise and monitor prices for products from the dashboard, get alerts and notifications when the price of a product changes or comes back in stock again! | ||||
|  | ||||
| [<img src="docs/restock-overview.png" style="max-width:100%;" alt="Easily keep an eye on product price changes directly from the UI"  title="Easily keep an eye on product price changes directly from the UI" />](https://changedetection.io?src=github) | ||||
|  | ||||
| Set price change notification parameters, upper and lower price, price change percentage and more. | ||||
| Always know when a product for sale drops in price. | ||||
|  | ||||
| [<img src="docs/restock-settings.png" style="max-width:100%;" alt="Set upper lower and percentage price change notification values"  title="Set upper lower and percentage price change notification values" />](https://changedetection.io?src=github) | ||||
|  | ||||
|  | ||||
|  | ||||
| ### Example use cases | ||||
|  | ||||
| @@ -272,6 +286,10 @@ I offer commercial support, this software is depended on by network security, ae | ||||
| [release-link]: https://github.com/dgtlmoon/changedetection.io/releases | ||||
| [docker-link]: https://hub.docker.com/r/dgtlmoon/changedetection.io | ||||
|  | ||||
| ## Commercial Licencing | ||||
|  | ||||
| If you are reselling this software either in part or full as part of any commercial arrangement, you must abide by our COMMERCIAL_LICENCE.md found in our code repository, please contact dgtlmoon@gmail.com and contact@changedetection.io . | ||||
|  | ||||
| ## Third-party licenses | ||||
|  | ||||
| changedetectionio.html_tools.elementpath_tostring: Copyright (c), 2018-2021, SISSA (Scuola Internazionale Superiore di Studi Avanzati), Licensed under [MIT license](https://github.com/sissaschool/elementpath/blob/master/LICENSE) | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| # Only exists for direct CLI usage | ||||
|  | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| # Read more https://github.com/dgtlmoon/changedetection.io/wiki | ||||
|  | ||||
| __version__ = '0.45.23' | ||||
| __version__ = '0.46.04' | ||||
|  | ||||
| from changedetectionio.strtobool import strtobool | ||||
| from json.decoder import JSONDecodeError | ||||
|   | ||||
| @@ -12,9 +12,10 @@ import copy | ||||
| # See docs/README.md for rebuilding the docs/apidoc information | ||||
|  | ||||
| from . import api_schema | ||||
| from ..model import watch_base | ||||
|  | ||||
| # Build a JSON Schema atleast partially based on our Watch model | ||||
| from changedetectionio.model.Watch import base_config as watch_base_config | ||||
| watch_base_config = watch_base() | ||||
| schema = api_schema.build_watch_json_schema(watch_base_config) | ||||
|  | ||||
| schema_create_watch = copy.deepcopy(schema) | ||||
| @@ -170,23 +171,33 @@ class WatchSingleHistory(Resource): | ||||
|             curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history/1677092977 -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" | ||||
|         @apiName Get single snapshot content | ||||
|         @apiGroup Watch History | ||||
|         @apiParam {String} [html]       Optional Set to =1 to return the last HTML (only stores last 2 snapshots, use `latest` as timestamp) | ||||
|         @apiSuccess (200) {String} OK | ||||
|         @apiSuccess (404) {String} ERR Not found | ||||
|         """ | ||||
|         watch = self.datastore.data['watching'].get(uuid) | ||||
|         if not watch: | ||||
|             abort(404, message='No watch exists with the UUID of {}'.format(uuid)) | ||||
|             abort(404, message=f"No watch exists with the UUID of {uuid}") | ||||
|  | ||||
|         if not len(watch.history): | ||||
|             abort(404, message='Watch found but no history exists for the UUID {}'.format(uuid)) | ||||
|             abort(404, message=f"Watch found but no history exists for the UUID {uuid}") | ||||
|  | ||||
|         if timestamp == 'latest': | ||||
|             timestamp = list(watch.history.keys())[-1] | ||||
|  | ||||
|         content = watch.get_history_snapshot(timestamp) | ||||
|         if request.args.get('html'): | ||||
|             content = watch.get_fetched_html(timestamp) | ||||
|             if content: | ||||
|                 response = make_response(content, 200) | ||||
|                 response.mimetype = "text/html" | ||||
|             else: | ||||
|                 response = make_response("No content found", 404) | ||||
|                 response.mimetype = "text/plain" | ||||
|         else: | ||||
|             content = watch.get_history_snapshot(timestamp) | ||||
|             response = make_response(content, 200) | ||||
|             response.mimetype = "text/plain" | ||||
|  | ||||
|         response = make_response(content, 200) | ||||
|         response.mimetype = "text/plain" | ||||
|         return response | ||||
|  | ||||
|  | ||||
|   | ||||
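
The watch-history hunk above adds an optional `html` query parameter that returns the last fetched HTML instead of the plain-text snapshot (only the most recent snapshots keep HTML, so `latest` is the safe timestamp). A minimal sketch of calling it with Python's `requests`, reusing the example host, UUID and API key from the `@api` documentation above:

```python
import requests

# Example values taken from the @api documentation in the hunk above
BASE = "http://localhost:5000/api/v1"
UUID = "cc0cfffa-f449-477b-83ea-0caafd1dc091"
HEADERS = {"x-api-key": "813031b16330fe25e3780cf0325daa45"}

# html=1 asks for the last fetched HTML; 'latest' avoids guessing a timestamp
r = requests.get(f"{BASE}/watch/{UUID}/history/latest",
                 params={"html": 1}, headers=HEADERS)
print(r.status_code, r.headers.get("content-type"))
print(r.text[:200])
```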

changedetectionio/apprise_plugin/__init__.py (new file, 78 lines)

							| @@ -0,0 +1,78 @@ | ||||
| # include the decorator | ||||
| from apprise.decorators import notify | ||||
|  | ||||
| @notify(on="delete") | ||||
| @notify(on="deletes") | ||||
| @notify(on="get") | ||||
| @notify(on="gets") | ||||
| @notify(on="post") | ||||
| @notify(on="posts") | ||||
| @notify(on="put") | ||||
| @notify(on="puts") | ||||
| def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs): | ||||
|     import requests | ||||
|     import json | ||||
|     from apprise.utils import parse_url as apprise_parse_url | ||||
|     from apprise import URLBase | ||||
|  | ||||
|     url = kwargs['meta'].get('url') | ||||
|  | ||||
|     if url.startswith('post'): | ||||
|         r = requests.post | ||||
|     elif url.startswith('get'): | ||||
|         r = requests.get | ||||
|     elif url.startswith('put'): | ||||
|         r = requests.put | ||||
|     elif url.startswith('delete'): | ||||
|         r = requests.delete | ||||
|  | ||||
|     url = url.replace('post://', 'http://') | ||||
|     url = url.replace('posts://', 'https://') | ||||
|     url = url.replace('put://', 'http://') | ||||
|     url = url.replace('puts://', 'https://') | ||||
|     url = url.replace('get://', 'http://') | ||||
|     url = url.replace('gets://', 'https://') | ||||
|     url = url.replace('put://', 'http://') | ||||
|     url = url.replace('puts://', 'https://') | ||||
|     url = url.replace('delete://', 'http://') | ||||
|     url = url.replace('deletes://', 'https://') | ||||
|  | ||||
|     headers = {} | ||||
|     params = {} | ||||
|     auth = None | ||||
|  | ||||
|     # Convert /foobar?+some-header=hello to proper header dictionary | ||||
|     results = apprise_parse_url(url) | ||||
|     if results: | ||||
|         # Add our headers that the user can potentially over-ride if they wish | ||||
|         # to to our returned result set and tidy entries by unquoting them | ||||
|         headers = {URLBase.unquote(x): URLBase.unquote(y) | ||||
|                    for x, y in results['qsd+'].items()} | ||||
|  | ||||
|         # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation | ||||
|         # In Apprise, it relies on prefixing each request arg with "-", because it uses say &method=update as a flag for apprise | ||||
|         # but here we are making straight requests, so we need todo convert this against apprise's logic | ||||
|         for k, v in results['qsd'].items(): | ||||
|             if not k.strip('+-') in results['qsd+'].keys(): | ||||
|                 params[URLBase.unquote(k)] = URLBase.unquote(v) | ||||
|  | ||||
|         # Determine Authentication | ||||
|         auth = '' | ||||
|         if results.get('user') and results.get('password'): | ||||
|             auth = (URLBase.unquote(results.get('user')), URLBase.unquote(results.get('password'))) | ||||
|         elif results.get('user'): | ||||
|             auth = (URLBase.unquote(results.get('user'))) | ||||
|  | ||||
|     # Try to auto-guess if it's JSON | ||||
|     try: | ||||
|         json.loads(body) | ||||
|         headers['Content-Type'] = 'application/json; charset=utf-8' | ||||
|     except ValueError as e: | ||||
|         pass | ||||
|  | ||||
|     r(results.get('url'), | ||||
|       auth=auth, | ||||
|       data=body.encode('utf-8') if type(body) is str else body, | ||||
|       headers=headers, | ||||
|       params=params | ||||
|       ) | ||||
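
The new `apprise_plugin` module above registers `get://`, `post://`, `put://` and `delete://` style URLs (plus their `s` variants for HTTPS) as custom Apprise handlers via `@notify(on=...)`. A hedged usage sketch, assuming the `changedetectionio` package is importable; the webhook URL and header are invented for illustration:

```python
import apprise
# Importing the module runs the @notify decorators and registers the schemes
import changedetectionio.apprise_plugin  # noqa: F401

a = apprise.Apprise()
# 'posts://' maps to an HTTPS POST; '+'-prefixed query args become headers
a.add("posts://example.com/webhook?+x-api-key=secret")
a.notify(title="Price changed", body='{"watch": "example", "price": 9.99}')
```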
| @@ -85,7 +85,8 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|         browsersteps_start_session['browserstepper'] = browser_steps.browsersteps_live_ui( | ||||
|             playwright_browser=browsersteps_start_session['browser'], | ||||
|             proxy=proxy, | ||||
|             start_url=datastore.data['watching'][watch_uuid].get('url') | ||||
|             start_url=datastore.data['watching'][watch_uuid].get('url'), | ||||
|             headers=datastore.data['watching'][watch_uuid].get('headers') | ||||
|         ) | ||||
|  | ||||
|         # For test | ||||
| @@ -187,8 +188,10 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|             u = browsersteps_sessions[browsersteps_session_id]['browserstepper'].page.url | ||||
|             if is_last_step and u: | ||||
|                 (screenshot, xpath_data) = browsersteps_sessions[browsersteps_session_id]['browserstepper'].request_visualselector_data() | ||||
|                 datastore.save_screenshot(watch_uuid=uuid, screenshot=screenshot) | ||||
|                 datastore.save_xpath_data(watch_uuid=uuid, data=xpath_data) | ||||
|                 watch = datastore.data['watching'].get(uuid) | ||||
|                 if watch: | ||||
|                     watch.save_screenshot(screenshot=screenshot) | ||||
|                     watch.save_xpath_data(data=xpath_data) | ||||
|  | ||||
| #        if not this_session.page: | ||||
| #            cleanup_playwright_session() | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import os | ||||
| import time | ||||
| @@ -25,6 +25,7 @@ browser_step_ui_config = {'Choose one': '0 0', | ||||
|                           'Click element if exists': '1 0', | ||||
|                           'Click element': '1 0', | ||||
|                           'Click element containing text': '0 1', | ||||
|                           'Click element containing text if exists': '0 1', | ||||
|                           'Enter text in field': '1 1', | ||||
|                           'Execute JS': '0 1', | ||||
| #                          'Extract text and use as filter': '1 0', | ||||
| @@ -96,12 +97,24 @@ class steppable_browser_interface(): | ||||
|         return self.action_goto_url(value=self.start_url) | ||||
|  | ||||
|     def action_click_element_containing_text(self, selector=None, value=''): | ||||
|         logger.debug("Clicking element containing text") | ||||
|         if not len(value.strip()): | ||||
|             return | ||||
|         elem = self.page.get_by_text(value) | ||||
|         if elem.count(): | ||||
|             elem.first.click(delay=randint(200, 500), timeout=3000) | ||||
|  | ||||
|     def action_click_element_containing_text_if_exists(self, selector=None, value=''): | ||||
|         logger.debug("Clicking element containing text if exists") | ||||
|         if not len(value.strip()): | ||||
|             return | ||||
|         elem = self.page.get_by_text(value) | ||||
|         logger.debug(f"Clicking element containing text - {elem.count()} elements found") | ||||
|         if elem.count(): | ||||
|             elem.first.click(delay=randint(200, 500), timeout=3000) | ||||
|         else: | ||||
|             return | ||||
|  | ||||
|     def action_enter_text_in_field(self, selector, value): | ||||
|         if not len(selector.strip()): | ||||
|             return | ||||
| @@ -255,8 +268,9 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|  | ||||
|     def get_current_state(self): | ||||
|         """Return the screenshot and interactive elements mapping, generally always called after action_()""" | ||||
|         from pkg_resources import resource_string | ||||
|         xpath_element_js = resource_string(__name__, "../../content_fetchers/res/xpath_element_scraper.js").decode('utf-8') | ||||
|         import importlib.resources | ||||
|         xpath_element_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('xpath_element_scraper.js').read_text() | ||||
|  | ||||
|         now = time.time() | ||||
|         self.page.wait_for_timeout(1 * 1000) | ||||
|  | ||||
| @@ -287,11 +301,9 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|         :param current_include_filters: | ||||
|         :return: | ||||
|         """ | ||||
|  | ||||
|         import importlib.resources | ||||
|         self.page.evaluate("var include_filters=''") | ||||
|         from pkg_resources import resource_string | ||||
|         # The code that scrapes elements and makes a list of elements/size/position to click on in the VisualSelector | ||||
|         xpath_element_js = resource_string(__name__, "../../content_fetchers/res/xpath_element_scraper.js").decode('utf-8') | ||||
|         xpath_element_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('xpath_element_scraper.js').read_text() | ||||
|         from changedetectionio.content_fetchers import visualselector_xpath_selectors | ||||
|         xpath_element_js = xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) | ||||
|         xpath_data = self.page.evaluate("async () => {" + xpath_element_js + "}") | ||||
|   | ||||
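
Several hunks in this compare swap `pkg_resources.resource_string()` for `importlib.resources` when loading the bundled scraper JavaScript. A minimal sketch of that pattern, assuming the `changedetectionio` package is importable; the package and file names are taken from the diff above:

```python
import importlib.resources

# Read a resource file shipped inside the package;
# read_text() returns the decoded string directly.
xpath_element_js = (
    importlib.resources.files("changedetectionio.content_fetchers.res")
    .joinpath("xpath_element_scraper.js")
    .read_text(encoding="utf-8")
)
print(f"loaded {len(xpath_element_js)} characters of scraper JS")
```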
| @@ -30,7 +30,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|     def long_task(uuid, preferred_proxy): | ||||
|         import time | ||||
|         from changedetectionio.content_fetchers import exceptions as content_fetcher_exceptions | ||||
|         from changedetectionio.processors import text_json_diff | ||||
|         from changedetectionio.processors.text_json_diff import text_json_diff | ||||
|         from changedetectionio.safe_jinja import render as jinja_render | ||||
|  | ||||
|         status = {'status': '', 'length': 0, 'text': ''} | ||||
|   | ||||
| @@ -17,6 +17,8 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q: PriorityQueue | ||||
|     @price_data_follower_blueprint.route("/<string:uuid>/accept", methods=['GET']) | ||||
|     def accept(uuid): | ||||
|         datastore.data['watching'][uuid]['track_ldjson_price_data'] = PRICE_DATA_TRACK_ACCEPT | ||||
|         datastore.data['watching'][uuid]['processor'] = 'restock_diff' | ||||
|         datastore.data['watching'][uuid].clear_watch() | ||||
|         update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False})) | ||||
|         return redirect(url_for("index")) | ||||
|  | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| from flask import Blueprint, request, make_response, render_template, flash, url_for, redirect | ||||
| from flask import Blueprint, request, render_template, flash, url_for, redirect | ||||
|  | ||||
|  | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio.flask_app import login_optionally_required | ||||
|  | ||||
| @@ -96,22 +98,55 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|     @tags_blueprint.route("/edit/<string:uuid>", methods=['GET']) | ||||
|     @login_optionally_required | ||||
|     def form_tag_edit(uuid): | ||||
|         from changedetectionio import forms | ||||
|  | ||||
|         from changedetectionio.blueprint.tags.form import group_restock_settings_form | ||||
|         if uuid == 'first': | ||||
|             uuid = list(datastore.data['settings']['application']['tags'].keys()).pop() | ||||
|  | ||||
|         default = datastore.data['settings']['application']['tags'].get(uuid) | ||||
|  | ||||
|         form = forms.watchForm(formdata=request.form if request.method == 'POST' else None, | ||||
|                                data=default, | ||||
|                                ) | ||||
|         form.datastore=datastore # needed? | ||||
|         form = group_restock_settings_form(formdata=request.form if request.method == 'POST' else None, | ||||
|                                        data=default, | ||||
|                                        extra_notification_tokens=datastore.get_unique_notification_tokens_available() | ||||
|                                        ) | ||||
|  | ||||
|         template_args = { | ||||
|             'data': default, | ||||
|             'form': form, | ||||
|             'watch': default, | ||||
|             'extra_notification_token_placeholder_info': datastore.get_unique_notification_token_placeholders_available(), | ||||
|         } | ||||
|  | ||||
|         included_content = {} | ||||
|         if form.extra_form_content(): | ||||
|             # So that the extra panels can access _helpers.html etc, we set the environment to load from templates/ | ||||
|             # And then render the code from the module | ||||
|             from jinja2 import Environment, FileSystemLoader | ||||
|             import importlib.resources | ||||
|             templates_dir = str(importlib.resources.files("changedetectionio").joinpath('templates')) | ||||
|             env = Environment(loader=FileSystemLoader(templates_dir)) | ||||
|             template_str = """{% from '_helpers.html' import render_field, render_checkbox_field, render_button %} | ||||
|         <script>         | ||||
|             $(document).ready(function () { | ||||
|                 toggleOpacity('#overrides_watch', '#restock-fieldset-price-group', true); | ||||
|             }); | ||||
|         </script>             | ||||
|                 <fieldset> | ||||
|                     <div class="pure-control-group"> | ||||
|                         <fieldset class="pure-group"> | ||||
|                         {{ render_checkbox_field(form.overrides_watch) }} | ||||
|                         <span class="pure-form-message-inline">Used for watches in "Restock & Price detection" mode</span> | ||||
|                         </fieldset> | ||||
|                 </fieldset> | ||||
|                 """ | ||||
|             template_str += form.extra_form_content() | ||||
|             template = env.from_string(template_str) | ||||
|             included_content = template.render(**template_args) | ||||
|  | ||||
|         output = render_template("edit-tag.html", | ||||
|                                  data=default, | ||||
|                                  form=form, | ||||
|                                  settings_application=datastore.data['settings']['application'], | ||||
|                                  extra_tab_content=form.extra_tab_content() if form.extra_tab_content() else None, | ||||
|                                  extra_form_content=included_content, | ||||
|                                  **template_args | ||||
|                                  ) | ||||
|  | ||||
|         return output | ||||
| @@ -120,14 +155,15 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|     @tags_blueprint.route("/edit/<string:uuid>", methods=['POST']) | ||||
|     @login_optionally_required | ||||
|     def form_tag_edit_submit(uuid): | ||||
|         from changedetectionio import forms | ||||
|         from changedetectionio.blueprint.tags.form import group_restock_settings_form | ||||
|         if uuid == 'first': | ||||
|             uuid = list(datastore.data['settings']['application']['tags'].keys()).pop() | ||||
|  | ||||
|         default = datastore.data['settings']['application']['tags'].get(uuid) | ||||
|  | ||||
|         form = forms.watchForm(formdata=request.form if request.method == 'POST' else None, | ||||
|         form = group_restock_settings_form(formdata=request.form if request.method == 'POST' else None, | ||||
|                                data=default, | ||||
|                                extra_notification_tokens=datastore.get_unique_notification_tokens_available() | ||||
|                                ) | ||||
|         # @todo subclass form so validation works | ||||
|         #if not form.validate(): | ||||
| @@ -136,6 +172,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
| #           return redirect(url_for('tags.form_tag_edit_submit', uuid=uuid)) | ||||
|  | ||||
|         datastore.data['settings']['application']['tags'][uuid].update(form.data) | ||||
|         datastore.data['settings']['application']['tags'][uuid]['processor'] = 'restock_diff' | ||||
|         datastore.needs_write_urgent = True | ||||
|         flash("Updated") | ||||
|  | ||||
|   | ||||
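
The `form_tag_edit()` change above renders module-supplied form markup from a template string so it can reuse the shared `_helpers.html` macros. A simplified, self-contained sketch of the same technique; the macro body and field label below are stand-ins rather than the real helpers:

```python
from jinja2 import Environment, DictLoader

# A stand-in _helpers.html exposing a trivial macro, so the string template
# below can {% from %} it the same way the blueprint code does.
env = Environment(loader=DictLoader({
    "_helpers.html": (
        "{% macro render_checkbox_field(label) %}"
        "<label><input type='checkbox'> {{ label }}</label>"
        "{% endmacro %}"
    )
}))

template_str = """{% from '_helpers.html' import render_checkbox_field %}
<fieldset>
  {{ render_checkbox_field('Activate for individual watches in this tag/group?') }}
</fieldset>"""

print(env.from_string(template_str).render())
```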
| @@ -1,16 +1,15 @@ | ||||
| from wtforms import ( | ||||
|     BooleanField, | ||||
|     Form, | ||||
|     IntegerField, | ||||
|     RadioField, | ||||
|     SelectField, | ||||
|     StringField, | ||||
|     SubmitField, | ||||
|     TextAreaField, | ||||
|     validators, | ||||
| ) | ||||
| from wtforms.fields.simple import BooleanField | ||||
|  | ||||
| from changedetectionio.processors.restock_diff.forms import processor_settings_form as restock_settings_form | ||||
|  | ||||
| class group_restock_settings_form(restock_settings_form): | ||||
|     overrides_watch = BooleanField('Activate for individual watches in this tag/group?', default=False) | ||||
|  | ||||
| class SingleTag(Form): | ||||
|  | ||||
|   | ||||
| @@ -26,6 +26,9 @@ | ||||
|         <ul> | ||||
|             <li class="tab" id=""><a href="#general">General</a></li> | ||||
|             <li class="tab"><a href="#filters-and-triggers">Filters & Triggers</a></li> | ||||
|             {% if extra_tab_content %} | ||||
|             <li class="tab"><a href="#extras_tab">{{ extra_tab_content }}</a></li> | ||||
|             {% endif %} | ||||
|             <li class="tab"><a href="#notifications">Notifications</a></li> | ||||
|         </ul> | ||||
|     </div> | ||||
| @@ -55,15 +58,15 @@ xpath://body/div/span[contains(@class, 'example-class')]", | ||||
|                         {% if '/text()' in  field %} | ||||
|                           <span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the <element> contains <![CDATA[]]></strong></span><br> | ||||
|                         {% endif %} | ||||
|                         <span class="pure-form-message-inline">One rule per line, <i>any</i> rules that matches will be used.<br> | ||||
|  | ||||
|                     <ul> | ||||
|                         <span class="pure-form-message-inline">One CSS, xPath, JSON Path/JQ selector per line, <i>any</i> rules that matches will be used.<br> | ||||
|                     <div data-target="#advanced-help-selectors" class="toggle-show pure-button button-tag button-xsmall">Show advanced help and tips</div> | ||||
|                     <ul id="advanced-help-selectors"> | ||||
|                         <li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li> | ||||
|                         <li>JSON - Limit text to this JSON rule, using either <a href="https://pypi.org/project/jsonpath-ng/" target="new">JSONPath</a> or <a href="https://stedolan.github.io/jq/" target="new">jq</a> (if installed). | ||||
|                             <ul> | ||||
|                                 <li>JSONPath: Prefix with <code>json:</code>, use <code>json:$</code> to force re-formatting if required,  <a href="https://jsonpath.com/" target="new">test your JSONPath here</a>.</li> | ||||
|                                 {% if jq_support %} | ||||
|                                 <li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>.</li> | ||||
|                                 <li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>. Prefix <code>jqraw:</code> outputs the results as text instead of a JSON list.</li> | ||||
|                                 {% else %} | ||||
|                                 <li>jq support not installed</li> | ||||
|                                 {% endif %} | ||||
| @@ -86,17 +89,25 @@ xpath://body/div/span[contains(@class, 'example-class')]", | ||||
|                     {{ render_field(form.subtractive_selectors, rows=5, placeholder="header | ||||
| footer | ||||
| nav | ||||
| .stockticker") }} | ||||
| .stockticker | ||||
| //*[contains(text(), 'Advertisement')]") }} | ||||
|                     <span class="pure-form-message-inline"> | ||||
|                         <ul> | ||||
|                           <li> Remove HTML element(s) by CSS selector before text conversion. </li> | ||||
|                           <li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li> | ||||
|                           <li> Remove HTML element(s) by CSS and XPath selectors before text conversion. </li> | ||||
|                           <li> Don't paste HTML here, use only CSS and XPath selectors </li> | ||||
|                           <li> Add multiple elements, CSS or XPath selectors per line to ignore multiple parts of the HTML. </li> | ||||
|                         </ul> | ||||
|                       </span> | ||||
|                 </fieldset> | ||||
|  | ||||
|             </div> | ||||
|  | ||||
|         {# rendered sub Template #} | ||||
|         {% if extra_form_content %} | ||||
|             <div class="tab-pane-inner" id="extras_tab"> | ||||
|             {{ extra_form_content|safe }} | ||||
|             </div> | ||||
|         {% endif %} | ||||
|             <div class="tab-pane-inner" id="notifications"> | ||||
|                 <fieldset> | ||||
|                     <div  class="pure-control-group inline-radio"> | ||||
| @@ -119,7 +130,7 @@ nav | ||||
|                         {% endif %} | ||||
|                         <a href="#notifications" id="notification-setting-reset-to-default" class="pure-button button-xsmall" style="right: 20px; top: 20px; position: absolute; background-color: #5f42dd; border-radius: 4px; font-size: 70%; color: #fff">Use system defaults</a> | ||||
|  | ||||
|                         {{ render_common_settings_form(form, emailprefix, settings_application) }} | ||||
|                         {{ render_common_settings_form(form, emailprefix, settings_application, extra_notification_token_placeholder_info) }} | ||||
|                     </div> | ||||
|                 </fieldset> | ||||
|             </div> | ||||
|   | ||||
| @@ -64,10 +64,9 @@ class Fetcher(): | ||||
|     render_extract_delay = 0 | ||||
|  | ||||
|     def __init__(self): | ||||
|         from pkg_resources import resource_string | ||||
|         # The code that scrapes elements and makes a list of elements/size/position to click on in the VisualSelector | ||||
|         self.xpath_element_js = resource_string(__name__, "res/xpath_element_scraper.js").decode('utf-8') | ||||
|         self.instock_data_js = resource_string(__name__, "res/stock-not-in-stock.js").decode('utf-8') | ||||
|         import importlib.resources | ||||
|         self.xpath_element_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('xpath_element_scraper.js').read_text(encoding='utf-8') | ||||
|         self.instock_data_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('stock-not-in-stock.js').read_text(encoding='utf-8') | ||||
|  | ||||
|     @abstractmethod | ||||
|     def get_error(self): | ||||
| @@ -82,7 +81,8 @@ class Fetcher(): | ||||
|             request_method, | ||||
|             ignore_status_codes=False, | ||||
|             current_include_filters=None, | ||||
|             is_binary=False): | ||||
|             is_binary=False, | ||||
|             empty_pages_are_a_change=False): | ||||
|         # Should set self.error, self.status_code and self.content | ||||
|         pass | ||||
|  | ||||
| @@ -96,6 +96,9 @@ class Fetcher(): | ||||
|  | ||||
|     @abstractmethod | ||||
|     def screenshot_step(self, step_n): | ||||
|         if self.browser_steps_screenshot_path and not os.path.isdir(self.browser_steps_screenshot_path): | ||||
|             logger.debug(f"> Creating data dir {self.browser_steps_screenshot_path}") | ||||
|             os.mkdir(self.browser_steps_screenshot_path) | ||||
|         return None | ||||
|  | ||||
|     @abstractmethod | ||||
| @@ -169,5 +172,8 @@ class Fetcher(): | ||||
|                 if os.path.isfile(f): | ||||
|                     os.unlink(f) | ||||
|  | ||||
|     def save_step_html(self, param): | ||||
|     def save_step_html(self, step_n): | ||||
|         if self.browser_steps_screenshot_path and not os.path.isdir(self.browser_steps_screenshot_path): | ||||
|             logger.debug(f"> Creating data dir {self.browser_steps_screenshot_path}") | ||||
|             os.mkdir(self.browser_steps_screenshot_path) | ||||
|         pass | ||||
|   | ||||
| @@ -1,6 +1,5 @@ | ||||
| from loguru import logger | ||||
|  | ||||
|  | ||||
| class Non200ErrorCodeReceived(Exception): | ||||
|     def __init__(self, status_code, url, screenshot=None, xpath_data=None, page_html=None): | ||||
|         # Set this so we can use it in other parts of the app | ||||
| @@ -81,17 +80,18 @@ class ScreenshotUnavailable(Exception): | ||||
|         self.status_code = status_code | ||||
|         self.url = url | ||||
|         if page_html: | ||||
|             from html_tools import html_to_text | ||||
|             from changedetectionio.html_tools import html_to_text | ||||
|             self.page_text = html_to_text(page_html) | ||||
|         return | ||||
|  | ||||
|  | ||||
| class ReplyWithContentButNoText(Exception): | ||||
|     def __init__(self, status_code, url, screenshot=None, has_filters=False, html_content=''): | ||||
|     def __init__(self, status_code, url, screenshot=None, has_filters=False, html_content='', xpath_data=None): | ||||
|         # Set this so we can use it in other parts of the app | ||||
|         self.status_code = status_code | ||||
|         self.url = url | ||||
|         self.screenshot = screenshot | ||||
|         self.has_filters = has_filters | ||||
|         self.html_content = html_content | ||||
|         self.xpath_data = xpath_data | ||||
|         return | ||||
|   | ||||
| @@ -58,6 +58,7 @@ class fetcher(Fetcher): | ||||
|                 self.proxy['password'] = parsed.password | ||||
|  | ||||
|     def screenshot_step(self, step_n=''): | ||||
|         super().screenshot_step(step_n=step_n) | ||||
|         screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=int(os.getenv("SCREENSHOT_QUALITY", 72))) | ||||
|  | ||||
|         if self.browser_steps_screenshot_path is not None: | ||||
| @@ -67,6 +68,7 @@ class fetcher(Fetcher): | ||||
|                 f.write(screenshot) | ||||
|  | ||||
|     def save_step_html(self, step_n): | ||||
|         super().save_step_html(step_n=step_n) | ||||
|         content = self.page.content() | ||||
|         destination = os.path.join(self.browser_steps_screenshot_path, 'step_{}.html'.format(step_n)) | ||||
|         logger.debug(f"Saving step HTML to {destination}") | ||||
| @@ -81,7 +83,8 @@ class fetcher(Fetcher): | ||||
|             request_method, | ||||
|             ignore_status_codes=False, | ||||
|             current_include_filters=None, | ||||
|             is_binary=False): | ||||
|             is_binary=False, | ||||
|             empty_pages_are_a_change=False): | ||||
|  | ||||
|         from playwright.sync_api import sync_playwright | ||||
|         import playwright._impl._errors | ||||
| @@ -128,7 +131,7 @@ class fetcher(Fetcher): | ||||
|             if response is None: | ||||
|                 context.close() | ||||
|                 browser.close() | ||||
|                 logger.debug("Content Fetcher > Response object was none") | ||||
|                 logger.debug("Content Fetcher > Response object from the browser communication was none") | ||||
|                 raise EmptyReply(url=url, status_code=None) | ||||
|  | ||||
|             try: | ||||
| @@ -164,10 +167,10 @@ class fetcher(Fetcher): | ||||
|  | ||||
|                 raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, screenshot=screenshot) | ||||
|  | ||||
|             if len(self.page.content().strip()) == 0: | ||||
|             if not empty_pages_are_a_change and len(self.page.content().strip()) == 0: | ||||
|                 logger.debug("Content Fetcher > Content was empty, empty_pages_are_a_change = False") | ||||
|                 context.close() | ||||
|                 browser.close() | ||||
|                 logger.debug("Content Fetcher > Content was empty") | ||||
|                 raise EmptyReply(url=url, status_code=response.status) | ||||
|  | ||||
|             # Run Browser Steps here | ||||
|   | ||||
| @@ -75,7 +75,8 @@ class fetcher(Fetcher): | ||||
|                          request_method, | ||||
|                          ignore_status_codes, | ||||
|                          current_include_filters, | ||||
|                          is_binary | ||||
|                          is_binary, | ||||
|                          empty_pages_are_a_change | ||||
|                          ): | ||||
|  | ||||
|         from changedetectionio.content_fetchers import visualselector_xpath_selectors | ||||
| @@ -153,7 +154,7 @@ class fetcher(Fetcher): | ||||
|         if response is None: | ||||
|             await self.page.close() | ||||
|             await browser.close() | ||||
|             logger.warning("Content Fetcher > Response object was none") | ||||
|             logger.warning("Content Fetcher > Response object was none (as in, the response from the browser was empty, not just the content)") | ||||
|             raise EmptyReply(url=url, status_code=None) | ||||
|  | ||||
|         self.headers = response.headers | ||||
| @@ -186,10 +187,11 @@ class fetcher(Fetcher): | ||||
|  | ||||
|             raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, screenshot=screenshot) | ||||
|         content = await self.page.content | ||||
|         if len(content.strip()) == 0: | ||||
|  | ||||
|         if not empty_pages_are_a_change and len(content.strip()) == 0: | ||||
|             logger.error("Content Fetcher > Content was empty (empty_pages_are_a_change is False), closing browsers") | ||||
|             await self.page.close() | ||||
|             await browser.close() | ||||
|             logger.error("Content Fetcher > Content was empty") | ||||
|             raise EmptyReply(url=url, status_code=response.status) | ||||
|  | ||||
|         # Run Browser Steps here | ||||
| @@ -247,7 +249,7 @@ class fetcher(Fetcher): | ||||
|         await self.fetch_page(**kwargs) | ||||
|  | ||||
|     def run(self, url, timeout, request_headers, request_body, request_method, ignore_status_codes=False, | ||||
|             current_include_filters=None, is_binary=False): | ||||
|             current_include_filters=None, is_binary=False, empty_pages_are_a_change=False): | ||||
|  | ||||
|         #@todo make update_worker async which could run any of these content_fetchers within memory and time constraints | ||||
|         max_time = os.getenv('PUPPETEER_MAX_PROCESSING_TIMEOUT_SECONDS', 180) | ||||
| @@ -262,7 +264,8 @@ class fetcher(Fetcher): | ||||
|                 request_method=request_method, | ||||
|                 ignore_status_codes=ignore_status_codes, | ||||
|                 current_include_filters=current_include_filters, | ||||
|                 is_binary=is_binary | ||||
|                 is_binary=is_binary, | ||||
|                 empty_pages_are_a_change=empty_pages_are_a_change | ||||
|             ), timeout=max_time)) | ||||
|         except asyncio.TimeoutError: | ||||
|             raise(BrowserFetchTimedOut(msg=f"Browser connected but was unable to process the page in {max_time} seconds.")) | ||||
|   | ||||
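
The puppeteer fetcher above wraps the whole asynchronous fetch in `asyncio.wait_for` so a stuck page cannot hang the worker beyond `PUPPETEER_MAX_PROCESSING_TIMEOUT_SECONDS`. A minimal sketch of the same pattern, with a hypothetical fetch coroutine standing in for the real browser call:

```python
import asyncio

async def fetch_page(url: str) -> str:
    # Hypothetical stand-in for the real browser fetch coroutine
    await asyncio.sleep(0.1)
    return "<html></html>"

def run_with_budget(url: str, max_time: int = 180) -> str:
    # Abort the whole fetch if it exceeds the processing budget
    try:
        return asyncio.run(asyncio.wait_for(fetch_page(url), timeout=max_time))
    except asyncio.TimeoutError:
        raise RuntimeError(
            f"Browser connected but was unable to process the page in {max_time} seconds.")

print(len(run_with_budget("https://example.com")))
```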
| @@ -1,9 +1,7 @@ | ||||
| from loguru import logger | ||||
| import hashlib | ||||
| import os | ||||
|  | ||||
| import chardet | ||||
| import requests | ||||
|  | ||||
| from changedetectionio import strtobool | ||||
| from changedetectionio.content_fetchers.exceptions import BrowserStepsInUnsupportedFetcher, EmptyReply, Non200ErrorCodeReceived | ||||
| from changedetectionio.content_fetchers.base import Fetcher | ||||
|  | ||||
| @@ -25,7 +23,11 @@ class fetcher(Fetcher): | ||||
|             request_method, | ||||
|             ignore_status_codes=False, | ||||
|             current_include_filters=None, | ||||
|             is_binary=False): | ||||
|             is_binary=False, | ||||
|             empty_pages_are_a_change=False): | ||||
|  | ||||
|         import chardet | ||||
|         import requests | ||||
|  | ||||
|         if self.browser_steps_get_valid_steps(): | ||||
|             raise BrowserStepsInUnsupportedFetcher(url=url) | ||||
| @@ -45,13 +47,19 @@ class fetcher(Fetcher): | ||||
|             if self.system_https_proxy: | ||||
|                 proxies['https'] = self.system_https_proxy | ||||
|  | ||||
|         r = requests.request(method=request_method, | ||||
|                              data=request_body, | ||||
|                              url=url, | ||||
|                              headers=request_headers, | ||||
|                              timeout=timeout, | ||||
|                              proxies=proxies, | ||||
|                              verify=False) | ||||
|         session = requests.Session() | ||||
|  | ||||
|         if strtobool(os.getenv('ALLOW_FILE_URI', 'false')) and url.startswith('file://'): | ||||
|             from requests_file import FileAdapter | ||||
|             session.mount('file://', FileAdapter()) | ||||
|  | ||||
|         r = session.request(method=request_method, | ||||
|                             data=request_body.encode('utf-8') if type(request_body) is str else request_body, | ||||
|                             url=url, | ||||
|                             headers=request_headers, | ||||
|                             timeout=timeout, | ||||
|                             proxies=proxies, | ||||
|                             verify=False) | ||||
|  | ||||
|         # If the response did not tell us what encoding format to expect, Then use chardet to override what `requests` thinks. | ||||
|         # For example - some sites don't tell us it's utf-8, but return utf-8 content | ||||
| @@ -67,7 +75,10 @@ class fetcher(Fetcher): | ||||
|         self.headers = r.headers | ||||
|  | ||||
|         if not r.content or not len(r.content): | ||||
|             raise EmptyReply(url=url, status_code=r.status_code) | ||||
|             if not empty_pages_are_a_change: | ||||
|                 raise EmptyReply(url=url, status_code=r.status_code) | ||||
|             else: | ||||
|                 logger.debug(f"URL {url} gave zero byte content reply with Status Code {r.status_code}, but empty_pages_are_a_change = True") | ||||
|  | ||||
|         # @todo test this | ||||
|         # @todo maybe you really want to test zero-byte return pages? | ||||
|   | ||||
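The plain-requests fetcher above picks up two behaviours: file:// URLs are served through requests_file's FileAdapter when ALLOW_FILE_URI is enabled, and a zero-byte reply only raises EmptyReply when empty_pages_are_a_change is off. A condensed sketch of that flow (the fetch() wrapper and the bare Exception are stand-ins for the real fetcher class and its exception types):

import os
import requests

def fetch(url, empty_pages_are_a_change=False):
    allow_file_uri = os.getenv('ALLOW_FILE_URI', 'false').lower() in ('1', 'true', 'yes')

    session = requests.Session()
    if allow_file_uri and url.startswith('file://'):
        from requests_file import FileAdapter  # third-party adapter for file:// URLs
        session.mount('file://', FileAdapter())

    r = session.get(url, timeout=30, verify=False)

    if not r.content:
        if not empty_pages_are_a_change:
            raise Exception(f"Empty reply from {url}, status code {r.status_code}")
        # otherwise a zero-byte body is accepted and treated like any other snapshot

    return r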
changedetectionio/content_fetchers/res/__init__.py (new file, 1 line)
							| @@ -0,0 +1 @@ | ||||
| # resources for browser injection/scraping | ||||
| @@ -30,14 +30,21 @@ function isItemInStock() { | ||||
|         'dieser artikel ist bald wieder verfügbar', | ||||
|         'dostępne wkrótce', | ||||
|         'en rupture de stock', | ||||
|         'ist derzeit nicht auf lager', | ||||
|         'isn\'t in stock right now', | ||||
|         'isnt in stock right now', | ||||
|         'isn’t in stock right now', | ||||
|         'item is no longer available', | ||||
|         'let me know when it\'s available', | ||||
|         'mail me when available', | ||||
|         'message if back in stock', | ||||
|         'nachricht bei', | ||||
|         'nicht auf lager', | ||||
|         'nicht lagernd', | ||||
|         'nicht lieferbar', | ||||
|         'nicht verfügbar', | ||||
|         'nicht vorrätig', | ||||
|         'nicht zur verfügung', | ||||
|         'nie znaleziono produktów', | ||||
|         'niet beschikbaar', | ||||
|         'niet leverbaar', | ||||
|         'niet op voorraad', | ||||
| @@ -48,6 +55,7 @@ function isItemInStock() { | ||||
|         'not currently available', | ||||
|         'not in stock', | ||||
|         'notify me when available', | ||||
|         'notify me', | ||||
|         'notify when available', | ||||
|         'não estamos a aceitar encomendas', | ||||
|         'out of stock', | ||||
| @@ -62,12 +70,17 @@ function isItemInStock() { | ||||
|         'this item is currently unavailable', | ||||
|         'tickets unavailable', | ||||
|         'tijdelijk uitverkocht', | ||||
|         'unavailable nearby', | ||||
|         'unavailable tickets', | ||||
|         'vergriffen', | ||||
|         'vorbestellen', | ||||
|         'vorbestellung ist bald möglich', | ||||
|         'we don\'t currently have any', | ||||
|         'we couldn\'t find any products that match', | ||||
|         'we do not currently have an estimate of when this product will be back in stock.', | ||||
|         'we don\'t know when or if this item will be back in stock.', | ||||
|         'we were not able to find a match', | ||||
|         'when this arrives in stock', | ||||
|         'zur zeit nicht an lager', | ||||
|         '品切れ', | ||||
|         '已售', | ||||
| @@ -161,7 +174,8 @@ function isItemInStock() { | ||||
|         const element = elementsToScan[i]; | ||||
|         // outside the 'fold' or some weird text in the heading area | ||||
|         // .getBoundingClientRect() was causing a crash in chrome 119, can only be run on contentVisibility != hidden | ||||
|         if (element.getBoundingClientRect().top + window.scrollY >= vh + 150 || element.getBoundingClientRect().top + window.scrollY <= 100) { | ||||
|         // Note: theres also an automated test that places the 'out of stock' text fairly low down | ||||
|         if (element.getBoundingClientRect().top + window.scrollY >= vh + 250 || element.getBoundingClientRect().top + window.scrollY <= 100) { | ||||
|             continue | ||||
|         } | ||||
|         elementText = ""; | ||||
| @@ -175,7 +189,7 @@ function isItemInStock() { | ||||
|             // and these mean its out of stock | ||||
|             for (const outOfStockText of outOfStockTexts) { | ||||
|                 if (elementText.includes(outOfStockText)) { | ||||
|                     console.log(`Selected 'Out of Stock' - found text "${outOfStockText}" - "${elementText}"`) | ||||
|                     console.log(`Selected 'Out of Stock' - found text "${outOfStockText}" - "${elementText}" - offset top ${element.getBoundingClientRect().top}, page height is ${vh}`) | ||||
|                     return outOfStockText; // item is out of stock | ||||
|                 } | ||||
|             } | ||||
|   | ||||
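The JavaScript above mostly grows the list of "out of stock" phrases and widens the vertical window that gets scanned; the detection itself is plain lowercase substring matching against that list. A rough Python analogue of the matching step, with a heavily truncated phrase list and made-up element texts:

# Illustrative only: phrase list shortened, texts invented.
OUT_OF_STOCK_TEXTS = [
    'not in stock',
    'notify me',
    'out of stock',
    'unavailable nearby',
    'vergriffen',
]

def first_out_of_stock_match(element_texts):
    for element_text in element_texts:
        text = element_text.lower().strip()
        for phrase in OUT_OF_STOCK_TEXTS:
            if phrase in text:
                return phrase  # looks out of stock
    return None

print(first_out_of_stock_match(["Sorry, this item is OUT OF STOCK"]))  # -> 'out of stock'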
| @@ -164,6 +164,15 @@ visibleElementsArray.forEach(function (element) { | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     let label = "not-interesting" // A placeholder, the actual labels for training are done by hand for now | ||||
|  | ||||
|     let text = element.textContent.trim().slice(0, 30).trim(); | ||||
|     while (/\n{2,}|\t{2,}/.test(text)) { | ||||
|         text = text.replace(/\n{2,}/g, '\n').replace(/\t{2,}/g, '\t') | ||||
|     } | ||||
|  | ||||
|     // Try to identify any possible currency amounts "Sale: 4000" or "Sale now 3000 Kc", can help with the training. | ||||
|     const hasDigitCurrency = (/\d/.test(text.slice(0, 6)) || /\d/.test(text.slice(-6)) ) &&  /([€£$¥₩₹]|USD|AUD|EUR|Kč|kr|SEK|,–)/.test(text) ; | ||||
|  | ||||
|     size_pos.push({ | ||||
|         xpath: xpath_result, | ||||
| @@ -171,9 +180,16 @@ visibleElementsArray.forEach(function (element) { | ||||
|         height: Math.round(bbox['height']), | ||||
|         left: Math.floor(bbox['left']), | ||||
|         top: Math.floor(bbox['top']) + scroll_y, | ||||
|         // tagName used by Browser Steps | ||||
|         tagName: (element.tagName) ? element.tagName.toLowerCase() : '', | ||||
|         // tagtype used by Browser Steps | ||||
|         tagtype: (element.tagName.toLowerCase() === 'input' && element.type) ? element.type.toLowerCase() : '', | ||||
|         isClickable: window.getComputedStyle(element).cursor == "pointer" | ||||
|         isClickable: window.getComputedStyle(element).cursor === "pointer", | ||||
|         // Used by the keras trainer | ||||
|         fontSize: window.getComputedStyle(element).getPropertyValue('font-size'), | ||||
|         fontWeight: window.getComputedStyle(element).getPropertyValue('font-weight'), | ||||
|         hasDigitCurrency: hasDigitCurrency, | ||||
|         label: label, | ||||
|     }); | ||||
|  | ||||
| }); | ||||
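hasDigitCurrency above marks element text that has a digit near either end plus a currency marker, purely as an extra feature for the hand-labelled training data. The same heuristic written out in Python, for illustration only:

import re

CURRENCY_RE = re.compile(r'([€£$¥₩₹]|USD|AUD|EUR|Kč|kr|SEK|,–)')

def has_digit_currency(text: str) -> bool:
    text = text.strip()[:30]
    digit_near_edge = bool(re.search(r'\d', text[:6])) or bool(re.search(r'\d', text[-6:]))
    return digit_near_edge and bool(CURRENCY_RE.search(text))

print(has_digit_currency("$19.99 today only"))  # True
print(has_digit_currency("Add to cart"))        # False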
| @@ -182,6 +198,7 @@ visibleElementsArray.forEach(function (element) { | ||||
| // Inject the current one set in the include_filters, which may be a CSS rule | ||||
| // used for displaying the current one in VisualSelector, where its not one we generated. | ||||
| if (include_filters.length) { | ||||
|     let results; | ||||
|     // Foreach filter, go and find it on the page and add it to the results so we can visualise it again | ||||
|     for (const f of include_filters) { | ||||
|         bbox = false; | ||||
| @@ -197,10 +214,15 @@ if (include_filters.length) { | ||||
|             if (f.startsWith('/') || f.startsWith('xpath')) { | ||||
|                 var qry_f = f.replace(/xpath(:|\d:)/, '') | ||||
|                 console.log("[xpath] Scanning for included filter " + qry_f) | ||||
|                 q = document.evaluate(qry_f, document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue; | ||||
|                 let xpathResult = document.evaluate(qry_f, document, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null); | ||||
|                 results = []; | ||||
|                 for (let i = 0; i < xpathResult.snapshotLength; i++) { | ||||
|                     results.push(xpathResult.snapshotItem(i)); | ||||
|                 } | ||||
|             } else { | ||||
|                 console.log("[css] Scanning for included filter " + f) | ||||
|                 q = document.querySelector(f); | ||||
|                 console.log("[css] Scanning for included filter " + f); | ||||
|                 results = document.querySelectorAll(f); | ||||
|             } | ||||
|         } catch (e) { | ||||
|             // Maybe catch DOMException and alert? | ||||
| @@ -208,44 +230,45 @@ if (include_filters.length) { | ||||
|             console.log(e); | ||||
|         } | ||||
|  | ||||
|         if (q) { | ||||
|             // Try to resolve //something/text() back to its /something so we can atleast get the bounding box | ||||
|             try { | ||||
|                 if (typeof q.nodeName == 'string' && q.nodeName === '#text') { | ||||
|                     q = q.parentElement | ||||
|                 } | ||||
|             } catch (e) { | ||||
|                 console.log(e) | ||||
|                 console.log("xpath_element_scraper: #text resolver") | ||||
|             } | ||||
|         if (results != null && results.length) { | ||||
|  | ||||
|             // #1231 - IN the case XPath attribute filter is applied, we will have to traverse up and find the element. | ||||
|             if (typeof q.getBoundingClientRect == 'function') { | ||||
|                 bbox = q.getBoundingClientRect(); | ||||
|                 console.log("xpath_element_scraper: Got filter element, scroll from top was " + scroll_y) | ||||
|             } else { | ||||
|             // Iterate over the results | ||||
|             results.forEach(node => { | ||||
|                 // Try to resolve //something/text() back to its /something so we can at least get the bounding box | ||||
|                 try { | ||||
|                     // Try and see we can find its ownerElement | ||||
|                     bbox = q.ownerElement.getBoundingClientRect(); | ||||
|                     console.log("xpath_element_scraper: Got filter by ownerElement element, scroll from top was " + scroll_y) | ||||
|                     if (typeof node.nodeName == 'string' && node.nodeName === '#text') { | ||||
|                         node = node.parentElement | ||||
|                     } | ||||
|                 } catch (e) { | ||||
|                     console.log(e) | ||||
|                     console.log("xpath_element_scraper: error looking up q.ownerElement") | ||||
|                     console.log("xpath_element_scraper: #text resolver") | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         if (!q) { | ||||
|             console.log("xpath_element_scraper: filter element " + f + " was not found"); | ||||
|         } | ||||
|                 // #1231 - IN the case XPath attribute filter is applied, we will have to traverse up and find the element. | ||||
|                 if (typeof node.getBoundingClientRect == 'function') { | ||||
|                     bbox = node.getBoundingClientRect(); | ||||
|                     console.log("xpath_element_scraper: Got filter element, scroll from top was " + scroll_y) | ||||
|                 } else { | ||||
|                     try { | ||||
|                         // Try and see we can find its ownerElement | ||||
|                         bbox = node.ownerElement.getBoundingClientRect(); | ||||
|                         console.log("xpath_element_scraper: Got filter by ownerElement element, scroll from top was " + scroll_y) | ||||
|                     } catch (e) { | ||||
|                         console.log(e) | ||||
|                         console.log("xpath_element_scraper: error looking up q.ownerElement") | ||||
|                     } | ||||
|                 } | ||||
|  | ||||
|         if (bbox && bbox['width'] > 0 && bbox['height'] > 0) { | ||||
|             size_pos.push({ | ||||
|                 xpath: f, | ||||
|                 width: parseInt(bbox['width']), | ||||
|                 height: parseInt(bbox['height']), | ||||
|                 left: parseInt(bbox['left']), | ||||
|                 top: parseInt(bbox['top']) + scroll_y | ||||
|                 if (bbox && bbox['width'] > 0 && bbox['height'] > 0) { | ||||
|                     size_pos.push({ | ||||
|                         xpath: f, | ||||
|                         width: parseInt(bbox['width']), | ||||
|                         height: parseInt(bbox['height']), | ||||
|                         left: parseInt(bbox['left']), | ||||
|                         top: parseInt(bbox['top']) + scroll_y, | ||||
|                         highlight_as_custom_filter: true | ||||
|                     }); | ||||
|                 } | ||||
|             }); | ||||
|         } | ||||
|     } | ||||
|   | ||||
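The scraper above switches from taking only the first match (FIRST_ORDERED_NODE_TYPE / querySelector) to iterating every match and resolving #text nodes back to their parent element. Outside the browser, the same idea in Python with lxml looks roughly like this (illustrative, not project code):

from lxml import etree

html = "<div><p class='price'>10</p><p class='price'>12</p></div>"
tree = etree.HTML(html)

# xpath() already returns every match, like an ORDERED_NODE_SNAPSHOT in the browser
for node in tree.xpath("//p[@class='price']/text()"):
    # text results are "smart strings"; getparent() recovers the owning element,
    # much like resolving a #text node to its parentElement above
    element = node.getparent() if hasattr(node, 'getparent') else node
    print(etree.tostring(element, encoding='unicode'))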
| @@ -56,7 +56,8 @@ class fetcher(Fetcher): | ||||
|             request_method, | ||||
|             ignore_status_codes=False, | ||||
|             current_include_filters=None, | ||||
|             is_binary=False): | ||||
|             is_binary=False, | ||||
|             empty_pages_are_a_change=False): | ||||
|  | ||||
|         from selenium import webdriver | ||||
|         from selenium.webdriver.chrome.options import Options as ChromeOptions | ||||
|   | ||||
| @@ -1,62 +1,97 @@ | ||||
| # used for the notifications, the front-end is using a JS library | ||||
|  | ||||
| import difflib | ||||
| from typing import List, Iterator, Union | ||||
|  | ||||
| def same_slicer(lst: List[str], start: int, end: int) -> List[str]: | ||||
|     """Return a slice of the list, or a single element if start == end.""" | ||||
|     return lst[start:end] if start != end else [lst[start]] | ||||
|  | ||||
| def same_slicer(l, a, b): | ||||
|     if a == b: | ||||
|         return [l[a]] | ||||
|     else: | ||||
|         return l[a:b] | ||||
|  | ||||
| # like .compare but a little different output | ||||
| def customSequenceMatcher(before, after, include_equal=False, include_removed=True, include_added=True, include_replaced=True, include_change_type_prefix=True): | ||||
|     cruncher = difflib.SequenceMatcher(isjunk=lambda x: x in " \\t", a=before, b=after) | ||||
|  | ||||
|     # @todo Line-by-line mode instead of buncghed, including `after` that is not in `before` (maybe unset?) | ||||
| def customSequenceMatcher( | ||||
|     before: List[str], | ||||
|     after: List[str], | ||||
|     include_equal: bool = False, | ||||
|     include_removed: bool = True, | ||||
|     include_added: bool = True, | ||||
|     include_replaced: bool = True, | ||||
|     include_change_type_prefix: bool = True | ||||
| ) -> Iterator[List[str]]: | ||||
|     """ | ||||
|     Compare two sequences and yield differences based on specified parameters. | ||||
|      | ||||
|     Args: | ||||
|         before (List[str]): Original sequence | ||||
|         after (List[str]): Modified sequence | ||||
|         include_equal (bool): Include unchanged parts | ||||
|         include_removed (bool): Include removed parts | ||||
|         include_added (bool): Include added parts | ||||
|         include_replaced (bool): Include replaced parts | ||||
|         include_change_type_prefix (bool): Add prefixes to indicate change types | ||||
|      | ||||
|     Yields: | ||||
|         List[str]: Differences between sequences | ||||
|     """ | ||||
|     cruncher = difflib.SequenceMatcher(isjunk=lambda x: x in " \t", a=before, b=after) | ||||
|      | ||||
|     for tag, alo, ahi, blo, bhi in cruncher.get_opcodes(): | ||||
|         if include_equal and tag == 'equal': | ||||
|             g = before[alo:ahi] | ||||
|             yield g | ||||
|             yield before[alo:ahi] | ||||
|         elif include_removed and tag == 'delete': | ||||
|             row_prefix = "(removed) " if include_change_type_prefix else '' | ||||
|             g = [ row_prefix + i for i in same_slicer(before, alo, ahi)] | ||||
|             yield g | ||||
|             prefix = "(removed) " if include_change_type_prefix else '' | ||||
|             yield [f"{prefix}{line}" for line in same_slicer(before, alo, ahi)] | ||||
|         elif include_replaced and tag == 'replace': | ||||
|             row_prefix = "(changed) " if include_change_type_prefix else '' | ||||
|             g = [row_prefix + i for i in same_slicer(before, alo, ahi)] | ||||
|             row_prefix = "(into) " if include_change_type_prefix else '' | ||||
|             g += [row_prefix + i for i in same_slicer(after, blo, bhi)] | ||||
|             yield g | ||||
|             prefix_changed = "(changed) " if include_change_type_prefix else '' | ||||
|             prefix_into = "(into) " if include_change_type_prefix else '' | ||||
|             yield [f"{prefix_changed}{line}" for line in same_slicer(before, alo, ahi)] + \ | ||||
|                   [f"{prefix_into}{line}" for line in same_slicer(after, blo, bhi)] | ||||
|         elif include_added and tag == 'insert': | ||||
|             row_prefix = "(added) " if include_change_type_prefix else '' | ||||
|             g = [row_prefix + i for i in same_slicer(after, blo, bhi)] | ||||
|             yield g | ||||
|             prefix = "(added) " if include_change_type_prefix else '' | ||||
|             yield [f"{prefix}{line}" for line in same_slicer(after, blo, bhi)] | ||||
|  | ||||
| # only_differences - only return info about the differences, no context | ||||
| # line_feed_sep could be "<br>" or "<li>" or "\n" etc | ||||
| def render_diff(previous_version_file_contents, newest_version_file_contents, include_equal=False, include_removed=True, include_added=True, include_replaced=True, line_feed_sep="\n", include_change_type_prefix=True, patch_format=False): | ||||
|  | ||||
|     newest_version_file_contents = [line.rstrip() for line in newest_version_file_contents.splitlines()] | ||||
|  | ||||
|     if previous_version_file_contents: | ||||
|         previous_version_file_contents = [line.rstrip() for line in previous_version_file_contents.splitlines()] | ||||
|     else: | ||||
|         previous_version_file_contents = "" | ||||
| def render_diff( | ||||
|     previous_version_file_contents: str, | ||||
|     newest_version_file_contents: str, | ||||
|     include_equal: bool = False, | ||||
|     include_removed: bool = True, | ||||
|     include_added: bool = True, | ||||
|     include_replaced: bool = True, | ||||
|     line_feed_sep: str = "\n", | ||||
|     include_change_type_prefix: bool = True, | ||||
|     patch_format: bool = False | ||||
| ) -> str: | ||||
|     """ | ||||
|     Render the difference between two file contents. | ||||
|      | ||||
|     Args: | ||||
|         previous_version_file_contents (str): Original file contents | ||||
|         newest_version_file_contents (str): Modified file contents | ||||
|         include_equal (bool): Include unchanged parts | ||||
|         include_removed (bool): Include removed parts | ||||
|         include_added (bool): Include added parts | ||||
|         include_replaced (bool): Include replaced parts | ||||
|         line_feed_sep (str): Separator for lines in output | ||||
|         include_change_type_prefix (bool): Add prefixes to indicate change types | ||||
|         patch_format (bool): Use patch format for output | ||||
|      | ||||
|     Returns: | ||||
|         str: Rendered difference | ||||
|     """ | ||||
|     newest_lines = [line.rstrip() for line in newest_version_file_contents.splitlines()] | ||||
|     previous_lines = [line.rstrip() for line in previous_version_file_contents.splitlines()] if previous_version_file_contents else [] | ||||
|  | ||||
|     if patch_format: | ||||
|         patch = difflib.unified_diff(previous_version_file_contents, newest_version_file_contents) | ||||
|         patch = difflib.unified_diff(previous_lines, newest_lines) | ||||
|         return line_feed_sep.join(patch) | ||||
|  | ||||
|     rendered_diff = customSequenceMatcher(before=previous_version_file_contents, | ||||
|                                           after=newest_version_file_contents, | ||||
|                                           include_equal=include_equal, | ||||
|                                           include_removed=include_removed, | ||||
|                                           include_added=include_added, | ||||
|                                           include_replaced=include_replaced, | ||||
|                                           include_change_type_prefix=include_change_type_prefix) | ||||
|     rendered_diff = customSequenceMatcher( | ||||
|         before=previous_lines, | ||||
|         after=newest_lines, | ||||
|         include_equal=include_equal, | ||||
|         include_removed=include_removed, | ||||
|         include_added=include_added, | ||||
|         include_replaced=include_replaced, | ||||
|         include_change_type_prefix=include_change_type_prefix | ||||
|     ) | ||||
|  | ||||
|     # Recursively join lists | ||||
|     f = lambda L: line_feed_sep.join([f(x) if type(x) is list else x for x in L]) | ||||
|     p= f(rendered_diff) | ||||
|     return p | ||||
|     def flatten(lst: List[Union[str, List[str]]]) -> str: | ||||
|         return line_feed_sep.join(flatten(x) if isinstance(x, list) else x for x in lst) | ||||
|  | ||||
|     return flatten(rendered_diff) | ||||
|   | ||||
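Since the diff helpers were re-typed and tidied rather than changed in behaviour, here is a quick usage sketch of render_diff() as it stands after this change (the import path is assumed; the prefixes come from customSequenceMatcher above):

from changedetectionio.diff import render_diff  # module path assumed

before = "price: 10\ncolour: red"
after = "price: 12\ncolour: red"

# Default mode reports only differences, with change-type prefixes
print(render_diff(before, after))
# (changed) price: 10
# (into) price: 12

# patch_format=True hands the same split lines to difflib.unified_diff instead
print(render_diff(before, after, patch_format=True))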
| @@ -1,18 +1,26 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import datetime | ||||
| import importlib | ||||
|  | ||||
| import flask_login | ||||
| import locale | ||||
| import os | ||||
| import pytz | ||||
| import queue | ||||
| import threading | ||||
| import time | ||||
| import timeago | ||||
|  | ||||
| from .content_fetchers.exceptions import ReplyWithContentButNoText | ||||
| from .processors import find_processors, get_parent_module, get_custom_watch_obj_for_processor | ||||
| from .processors.text_json_diff.processor import FilterNotFoundInResponse | ||||
| from .safe_jinja import render as jinja_render | ||||
| from changedetectionio.strtobool import strtobool | ||||
| from copy import deepcopy | ||||
| from functools import wraps | ||||
| from threading import Event | ||||
| import flask_login | ||||
| import pytz | ||||
| import timeago | ||||
|  | ||||
| from feedgen.feed import FeedGenerator | ||||
| from flask import ( | ||||
|     Flask, | ||||
| @@ -79,6 +87,14 @@ csrf = CSRFProtect() | ||||
| csrf.init_app(app) | ||||
| notification_debug_log=[] | ||||
|  | ||||
| # get locale ready | ||||
| default_locale = locale.getdefaultlocale() | ||||
| logger.info(f"System locale default is {default_locale}") | ||||
| try: | ||||
|     locale.setlocale(locale.LC_ALL, default_locale) | ||||
| except locale.Error: | ||||
|     logger.warning(f"Unable to set locale {default_locale}, locale is not installed maybe?") | ||||
|  | ||||
| watch_api = Api(app, decorators=[csrf.exempt]) | ||||
|  | ||||
| def init_app_secret(datastore_path): | ||||
| @@ -108,6 +124,14 @@ def get_darkmode_state(): | ||||
| def get_css_version(): | ||||
|     return __version__ | ||||
|  | ||||
| @app.template_filter('format_number_locale') | ||||
| def _jinja2_filter_format_number_locale(value: float) -> str: | ||||
|     "Formats for example 4000.10 to the local locale default of 4,000.10" | ||||
|     # Format the number with two decimal places (locale format string will return 6 decimal) | ||||
|     formatted_value = locale.format_string("%.2f", value, grouping=True) | ||||
|  | ||||
|     return formatted_value | ||||
|  | ||||
| # We use the whole watch object from the store/JSON so we can see if there's some related status in terms of a thread | ||||
| # running or something similar. | ||||
| @app.template_filter('format_last_checked_time') | ||||
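The new format_number_locale filter relies on the standard locale module: the startup code above sets the process locale once from the system default, after which locale.format_string() applies the right grouping and decimal separator. A standalone sketch of that behaviour:

import locale

# Mirror the startup code above: use the system default, fall back if it is not installed
try:
    locale.setlocale(locale.LC_ALL, locale.getdefaultlocale())
except locale.Error:
    locale.setlocale(locale.LC_ALL, 'C')

# Two decimal places with thousands grouping, e.g. 4000.1 -> '4,000.10' under en_US
print(locale.format_string("%.2f", 4000.10, grouping=True))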
| @@ -512,12 +536,22 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|     @login_optionally_required | ||||
|     def ajax_callback_send_notification_test(watch_uuid=None): | ||||
|  | ||||
|         # Watch_uuid could be unsuet in the case its used in tag editor, global setings | ||||
|         # Watch_uuid could be unset in the case it's used in the tag editor / global settings | ||||
|         import apprise | ||||
|         import random | ||||
|         from .apprise_asset import asset | ||||
|         apobj = apprise.Apprise(asset=asset) | ||||
|         # so that the custom endpoints are registered | ||||
|         from changedetectionio.apprise_plugin import apprise_custom_api_call_wrapper | ||||
|         is_global_settings_form = request.args.get('mode', '') == 'global-settings' | ||||
|         is_group_settings_form = request.args.get('mode', '') == 'group-settings' | ||||
|  | ||||
|         watch = datastore.data['watching'].get(watch_uuid) if watch_uuid else None | ||||
|         # Use an existing random one on the global/main settings form | ||||
|         if not watch_uuid and (is_global_settings_form or is_group_settings_form): | ||||
|             logger.debug(f"Send test notification - Choosing random Watch {watch_uuid}") | ||||
|             watch_uuid = random.choice(list(datastore.data['watching'].keys())) | ||||
|  | ||||
|         watch = datastore.data['watching'].get(watch_uuid) | ||||
|  | ||||
|         notification_urls = request.form['notification_urls'].strip().splitlines() | ||||
|  | ||||
| @@ -529,8 +563,6 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                     tag = datastore.tag_exists_by_name(k.strip()) | ||||
|                     notification_urls = tag.get('notifications_urls') if tag and tag.get('notifications_urls') else None | ||||
|  | ||||
|         is_global_settings_form = request.args.get('mode', '') == 'global-settings' | ||||
|         is_group_settings_form = request.args.get('mode', '') == 'group-settings' | ||||
|         if not notification_urls and not is_global_settings_form and not is_group_settings_form: | ||||
|             # In the global settings, use only what is typed currently in the text box | ||||
|             logger.debug("Test notification - Trying by global system settings notifications") | ||||
| @@ -549,7 +581,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|         try: | ||||
|             # use the same as when it is triggered, but then override it with the form test values | ||||
|             n_object = { | ||||
|                 'watch_url': request.form['window_url'], | ||||
|                 'watch_url': request.form.get('window_url', "https://changedetection.io"), | ||||
|                 'notification_urls': notification_urls | ||||
|             } | ||||
|  | ||||
| @@ -616,11 +648,11 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|     @login_optionally_required | ||||
|     # https://stackoverflow.com/questions/42984453/wtforms-populate-form-with-data-if-data-exists | ||||
|     # https://wtforms.readthedocs.io/en/3.0.x/forms/#wtforms.form.Form.populate_obj ? | ||||
|  | ||||
|     def edit_page(uuid): | ||||
|         from . import forms | ||||
|         from .blueprint.browser_steps.browser_steps import browser_step_ui_config | ||||
|         from . import processors | ||||
|         import importlib | ||||
|  | ||||
|         # More for testing, possible to return the first/only | ||||
|         if not datastore.data['watching'].keys(): | ||||
| @@ -652,14 +684,40 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|             # Radio needs '' not None, or in case the chosen one no longer exists | ||||
|             if default['proxy'] is None or not any(default['proxy'] in tup for tup in datastore.proxy_list): | ||||
|                 default['proxy'] = '' | ||||
|  | ||||
|         # proxy_override set to the json/text list of the items | ||||
|         form = forms.watchForm(formdata=request.form if request.method == 'POST' else None, | ||||
|                                data=default | ||||
|                                ) | ||||
|  | ||||
|         # For the form widget tag uuid lookup | ||||
|         form.tags.datastore = datastore # in _value | ||||
|         # Does it use some custom form? does one exist? | ||||
|         processor_name = datastore.data['watching'][uuid].get('processor', '') | ||||
|         processor_classes = next((tpl for tpl in find_processors() if tpl[1] == processor_name), None) | ||||
|         if not processor_classes: | ||||
|             flash(f"Cannot load the edit form for processor/plugin '{processor_name}', plugin missing?", 'error') | ||||
|             return redirect(url_for('index')) | ||||
|  | ||||
|         parent_module = get_parent_module(processor_classes[0]) | ||||
|  | ||||
|         try: | ||||
|             # Get the parent of the "processor.py" go up one, get the form (kinda spaghetti but its reusing existing code) | ||||
|             forms_module = importlib.import_module(f"{parent_module.__name__}.forms") | ||||
|             # Access the 'processor_settings_form' class from the 'forms' module | ||||
|             form_class = getattr(forms_module, 'processor_settings_form') | ||||
|         except ModuleNotFoundError as e: | ||||
|             # .forms didnt exist | ||||
|             form_class = forms.processor_text_json_diff_form | ||||
|         except AttributeError as e: | ||||
|             # .forms exists but no useful form | ||||
|             form_class = forms.processor_text_json_diff_form | ||||
|  | ||||
|         form = form_class(formdata=request.form if request.method == 'POST' else None, | ||||
|                           data=default, | ||||
|                           extra_notification_tokens=default.extra_notification_token_values() | ||||
|                           ) | ||||
|  | ||||
|         # For the form widget tag UUID back to "string name" for the field | ||||
|         form.tags.datastore = datastore | ||||
|  | ||||
|         # Used by some forms that need to dig deeper | ||||
|         form.datastore = datastore | ||||
|         form.watch = default | ||||
|  | ||||
|         for p in datastore.extra_browsers: | ||||
|             form.fetch_backend.choices.append(p) | ||||
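The edit page now tries to load a per-processor form: import the processor package's .forms module, take its processor_settings_form class, and fall back to the stock text/JSON diff form if either step fails. Stripped of the Flask plumbing, the lookup is just importlib plus getattr with two fallbacks (default_form is a placeholder argument):

import importlib

def load_processor_form(parent_module_name, default_form):
    """Return processor_settings_form from <parent>.forms, or the given default form class."""
    try:
        forms_module = importlib.import_module(f"{parent_module_name}.forms")
        return getattr(forms_module, 'processor_settings_form')
    except ModuleNotFoundError:
        return default_form  # the processor ships no forms.py at all
    except AttributeError:
        return default_form  # forms.py exists but defines no processor_settings_form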
| @@ -679,7 +737,15 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|  | ||||
|         if request.method == 'POST' and form.validate(): | ||||
|  | ||||
|             extra_update_obj = {} | ||||
|             # If they changed processor, it makes sense to reset it. | ||||
|             if datastore.data['watching'][uuid].get('processor') != form.data.get('processor'): | ||||
|                 datastore.data['watching'][uuid].clear_watch() | ||||
|                 flash("Reset watch history due to change of processor") | ||||
|  | ||||
|             extra_update_obj = { | ||||
|                 'consecutive_filter_failures': 0, | ||||
|                 'last_error' : False | ||||
|             } | ||||
|  | ||||
|             if request.args.get('unpause_on_save'): | ||||
|                 extra_update_obj['paused'] = False | ||||
| @@ -717,10 +783,16 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|             datastore.data['watching'][uuid].update(form.data) | ||||
|             datastore.data['watching'][uuid].update(extra_update_obj) | ||||
|  | ||||
|             if request.args.get('unpause_on_save'): | ||||
|                 flash("Updated watch - unpaused!.") | ||||
|             else: | ||||
|                 flash("Updated watch.") | ||||
|             if not datastore.data['watching'][uuid].get('tags'): | ||||
|                 # Force it to be a list, because form.data['tags'] will be string if nothing found | ||||
|                 # And del(form.data['tags'] ) wont work either for some reason | ||||
|                 datastore.data['watching'][uuid]['tags'] = [] | ||||
|  | ||||
|             # Recast it if need be to right data Watch handler | ||||
|             watch_class = get_custom_watch_obj_for_processor(form.data.get('processor')) | ||||
|             datastore.data['watching'][uuid] = watch_class(datastore_path=datastore_o.datastore_path, default=datastore.data['watching'][uuid]) | ||||
|  | ||||
|             flash("Updated watch - unpaused!" if request.args.get('unpause_on_save') else "Updated watch.") | ||||
|  | ||||
|             # Re #286 - We wait for syncing new data to disk in another thread every 60 seconds | ||||
|             # But in the case something is added we should save straight away | ||||
| @@ -750,6 +822,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                 jq_support = False | ||||
|  | ||||
|             watch = datastore.data['watching'].get(uuid) | ||||
|  | ||||
|             system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver' | ||||
|  | ||||
|             is_html_webdriver = False | ||||
| @@ -758,23 +831,42 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|  | ||||
|             # Only works reliably with Playwright | ||||
|             visualselector_enabled = os.getenv('PLAYWRIGHT_DRIVER_URL', False) and is_html_webdriver | ||||
|             template_args = { | ||||
|                 'available_processors': processors.available_processors(), | ||||
|                 'browser_steps_config': browser_step_ui_config, | ||||
|                 'emailprefix': os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False), | ||||
|                 'extra_title': f" - Edit - {watch.label}", | ||||
|                 'extra_processor_config': form.extra_tab_content(), | ||||
|                 'extra_notification_token_placeholder_info': datastore.get_unique_notification_token_placeholders_available(), | ||||
|                 'form': form, | ||||
|                 'has_default_notification_urls': True if len(datastore.data['settings']['application']['notification_urls']) else False, | ||||
|                 'has_extra_headers_file': len(datastore.get_all_headers_in_textfile_for_watch(uuid=uuid)) > 0, | ||||
|                 'has_special_tag_options': _watch_has_tag_options_set(watch=watch), | ||||
|                 'is_html_webdriver': is_html_webdriver, | ||||
|                 'jq_support': jq_support, | ||||
|                 'playwright_enabled': os.getenv('PLAYWRIGHT_DRIVER_URL', False), | ||||
|                 'settings_application': datastore.data['settings']['application'], | ||||
|                 'using_global_webdriver_wait': not default['webdriver_delay'], | ||||
|                 'uuid': uuid, | ||||
|                 'visualselector_enabled': visualselector_enabled, | ||||
|                 'watch': watch | ||||
|             } | ||||
|  | ||||
|             included_content = None | ||||
|             if form.extra_form_content(): | ||||
|                 # So that the extra panels can access _helpers.html etc, we set the environment to load from templates/ | ||||
|                 # And then render the code from the module | ||||
|                 from jinja2 import Environment, FileSystemLoader | ||||
|                 import importlib.resources | ||||
|                 templates_dir = str(importlib.resources.files("changedetectionio").joinpath('templates')) | ||||
|                 env = Environment(loader=FileSystemLoader(templates_dir)) | ||||
|                 template = env.from_string(form.extra_form_content()) | ||||
|                 included_content = template.render(**template_args) | ||||
|  | ||||
|             output = render_template("edit.html", | ||||
|                                      available_processors=processors.available_processors(), | ||||
|                                      browser_steps_config=browser_step_ui_config, | ||||
|                                      emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False), | ||||
|                                      extra_title=f" - Edit - {watch.label}", | ||||
|                                      form=form, | ||||
|                                      has_default_notification_urls=True if len(datastore.data['settings']['application']['notification_urls']) else False, | ||||
|                                      has_extra_headers_file=len(datastore.get_all_headers_in_textfile_for_watch(uuid=uuid)) > 0, | ||||
|                                      has_special_tag_options=_watch_has_tag_options_set(watch=watch), | ||||
|                                      is_html_webdriver=is_html_webdriver, | ||||
|                                      jq_support=jq_support, | ||||
|                                      playwright_enabled=os.getenv('PLAYWRIGHT_DRIVER_URL', False), | ||||
|                                      settings_application=datastore.data['settings']['application'], | ||||
|                                      using_global_webdriver_wait=not default['webdriver_delay'], | ||||
|                                      uuid=uuid, | ||||
|                                      visualselector_enabled=visualselector_enabled, | ||||
|                                      watch=watch | ||||
|                                      extra_tab_content=form.extra_tab_content() if form.extra_tab_content() else None, | ||||
|                                      extra_form_content=included_content, | ||||
|                                      **template_args | ||||
|                                      ) | ||||
|  | ||||
|         return output | ||||
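extra_form_content() lets a processor plugin supply a template string that gets rendered with the same template_args as edit.html, via a throwaway Jinja2 environment rooted at the package's templates/ directory (so includes such as _helpers.html keep working). The core of that is only a few lines (the function name and variables here are illustrative):

import importlib.resources
from jinja2 import Environment, FileSystemLoader

def render_plugin_snippet(snippet: str, **template_args) -> str:
    templates_dir = str(importlib.resources.files("changedetectionio").joinpath('templates'))
    env = Environment(loader=FileSystemLoader(templates_dir))
    return env.from_string(snippet).render(**template_args)

# e.g. render_plugin_snippet("Watching {{ uuid }}", uuid="abc-123") -> "Watching abc-123"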
| @@ -800,7 +892,8 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|  | ||||
|         # Don't use form.data on POST so that it doesn't override the checkbox status from the POST status | ||||
|         form = forms.globalSettingsForm(formdata=request.form if request.method == 'POST' else None, | ||||
|                                         data=default | ||||
|                                         data=default, | ||||
|                                         extra_notification_tokens=datastore.get_unique_notification_tokens_available() | ||||
|                                         ) | ||||
|  | ||||
|         # Remove the last option 'System default' | ||||
| @@ -852,6 +945,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|         output = render_template("settings.html", | ||||
|                                  api_key=datastore.data['settings']['application'].get('api_access_token'), | ||||
|                                  emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False), | ||||
|                                  extra_notification_token_placeholder_info=datastore.get_unique_notification_token_placeholders_available(), | ||||
|                                  form=form, | ||||
|                                  hide_remove_pass=os.getenv("SALTED_PASS", False), | ||||
|                                  min_system_recheck_seconds=int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 3)), | ||||
| @@ -884,7 +978,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|             if request.values.get('urls') and len(request.values.get('urls').strip()): | ||||
|                 # Import and push into the queue for immediate update check | ||||
|                 importer = import_url_list() | ||||
|                 importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore, processor=request.values.get('processor')) | ||||
|                 importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore, processor=request.values.get('processor', 'text_json_diff')) | ||||
|                 for uuid in importer.new_uuids: | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True})) | ||||
|  | ||||
| @@ -1280,6 +1374,84 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|         except FileNotFoundError: | ||||
|             abort(404) | ||||
|  | ||||
|     @app.route("/edit/<string:uuid>/get-html", methods=['GET']) | ||||
|     @login_optionally_required | ||||
|     def watch_get_latest_html(uuid): | ||||
|         from io import BytesIO | ||||
|         from flask import send_file | ||||
|         import brotli | ||||
|  | ||||
|         watch = datastore.data['watching'].get(uuid) | ||||
|         if watch and watch.history.keys() and os.path.isdir(watch.watch_data_dir): | ||||
|             latest_filename = list(watch.history.keys())[-1] | ||||
|             html_fname = os.path.join(watch.watch_data_dir, f"{latest_filename}.html.br") | ||||
|             with open(html_fname, 'rb') as f: | ||||
|                 if html_fname.endswith('.br'): | ||||
|                     # Read and decompress the Brotli file | ||||
|                     decompressed_data = brotli.decompress(f.read()) | ||||
|                 else: | ||||
|                     decompressed_data = f.read() | ||||
|  | ||||
|             buffer = BytesIO(decompressed_data) | ||||
|  | ||||
|             return send_file(buffer, as_attachment=True, download_name=f"{latest_filename}.html", mimetype='text/html') | ||||
|  | ||||
|  | ||||
|         # Return a 500 error | ||||
|         abort(500) | ||||
|  | ||||
|     @app.route("/edit/<string:uuid>/preview-rendered", methods=['POST']) | ||||
|     @login_optionally_required | ||||
|     def watch_get_preview_rendered(uuid): | ||||
|         '''For when viewing the "preview" of the rendered text from inside of Edit''' | ||||
|         now = time.time() | ||||
|         import brotli | ||||
|         from . import forms | ||||
|  | ||||
|         text_after_filter = '' | ||||
|         tmp_watch = deepcopy(datastore.data['watching'].get(uuid)) | ||||
|  | ||||
|         if tmp_watch and tmp_watch.history and os.path.isdir(tmp_watch.watch_data_dir): | ||||
|             # Splice in the temporary stuff from the form | ||||
|             form = forms.processor_text_json_diff_form(formdata=request.form if request.method == 'POST' else None, | ||||
|                                                        data=request.form | ||||
|                                                        ) | ||||
|             # Only update vars that came in via the AJAX post | ||||
|             p = {k: v for k, v in form.data.items() if k in request.form.keys()} | ||||
|             tmp_watch.update(p) | ||||
|  | ||||
|             latest_filename = next(reversed(tmp_watch.history)) | ||||
|             html_fname = os.path.join(tmp_watch.watch_data_dir, f"{latest_filename}.html.br") | ||||
|             with open(html_fname, 'rb') as f: | ||||
|                 decompressed_data = brotli.decompress(f.read()).decode('utf-8') if html_fname.endswith('.br') else f.read().decode('utf-8') | ||||
|  | ||||
|                 # Just like a normal change detection except provide a fake "watch" object and dont call .call_browser() | ||||
|                 processor_module = importlib.import_module("changedetectionio.processors.text_json_diff.processor") | ||||
|                 update_handler = processor_module.perform_site_check(datastore=datastore, | ||||
|                                                                      watch_uuid=uuid # probably not needed anymore anyway? | ||||
|                                                                      ) | ||||
|                 # Use the last loaded HTML as the input | ||||
|                 update_handler.fetcher.content = decompressed_data | ||||
|                 update_handler.fetcher.headers['content-type'] = tmp_watch.get('content-type') | ||||
|                 try: | ||||
|                     changed_detected, update_obj, contents, text_after_filter = update_handler.run_changedetection( | ||||
|                         watch=tmp_watch, | ||||
|                         skip_when_checksum_same=False, | ||||
|                     ) | ||||
|                 except FilterNotFoundInResponse as e: | ||||
|                     text_after_filter = f"Filter not found in HTML: {str(e)}" | ||||
|                 except ReplyWithContentButNoText as e: | ||||
|                     text_after_filter = f"Filter found but no text (empty result)" | ||||
|                 except Exception as e: | ||||
|                     text_after_filter = f"Error: {str(e)}" | ||||
|  | ||||
|             if not text_after_filter.strip(): | ||||
|                 text_after_filter = 'Empty content' | ||||
|  | ||||
|         logger.trace(f"Parsed in {time.time()-now:.3f}s") | ||||
|         return text_after_filter.strip() | ||||
|  | ||||
|  | ||||
|     @app.route("/form/add/quickwatch", methods=['POST']) | ||||
|     @login_optionally_required | ||||
|     def form_quick_watch_add(): | ||||
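The new /edit/<uuid>/get-html endpoint streams the most recent stored snapshot back to the browser: read the .html.br file, Brotli-decompress it, wrap the bytes in BytesIO and hand that to Flask's send_file. Reduced to its essentials (the helper name and paths are illustrative):

from io import BytesIO

import brotli
from flask import send_file

def send_stored_snapshot(path: str, download_name: str):
    with open(path, 'rb') as f:
        raw = f.read()
    # Snapshots are stored Brotli-compressed with a .br suffix
    data = brotli.decompress(raw) if path.endswith('.br') else raw
    return send_file(BytesIO(data),
                     as_attachment=True,
                     download_name=download_name,
                     mimetype='text/html')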
| @@ -1385,7 +1557,7 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False})) | ||||
|                     i += 1 | ||||
|  | ||||
|         flash("{} watches queued for rechecking.".format(i)) | ||||
|         flash(f"{i} watches queued for rechecking.") | ||||
|         return redirect(url_for('index', tag=tag)) | ||||
|  | ||||
|     @app.route("/form/checkbox-operations", methods=['POST']) | ||||
| @@ -1479,9 +1651,13 @@ def changedetection_app(config=None, datastore_o=None): | ||||
|                     for uuid in uuids: | ||||
|                         uuid = uuid.strip() | ||||
|                         if datastore.data['watching'].get(uuid): | ||||
|                             # Bug in old versions caused by bad edit page/tag handler | ||||
|                             if isinstance(datastore.data['watching'][uuid]['tags'], str): | ||||
|                                 datastore.data['watching'][uuid]['tags'] = [] | ||||
|  | ||||
|                             datastore.data['watching'][uuid]['tags'].append(tag_uuid) | ||||
|  | ||||
|             flash("{} watches assigned tag".format(len(uuids))) | ||||
|             flash(f"{len(uuids)} watches were tagged") | ||||
|  | ||||
|         return redirect(url_for('index')) | ||||
|  | ||||
|   | ||||
| @@ -1,5 +1,6 @@ | ||||
| import os | ||||
| import re | ||||
|  | ||||
| from changedetectionio.strtobool import strtobool | ||||
|  | ||||
| from wtforms import ( | ||||
| @@ -220,7 +221,8 @@ class ValidateAppRiseServers(object): | ||||
|     def __call__(self, form, field): | ||||
|         import apprise | ||||
|         apobj = apprise.Apprise() | ||||
|  | ||||
|         # so that the custom endpoints are registered | ||||
|         from changedetectionio.apprise_plugin import apprise_custom_api_call_wrapper | ||||
|         for server_url in field.data: | ||||
|             if not apobj.add(server_url): | ||||
|                 message = field.gettext('\'%s\' is not a valid AppRise URL.' % (server_url)) | ||||
| @@ -230,9 +232,6 @@ class ValidateJinja2Template(object): | ||||
|     """ | ||||
|     Validates that a {token} is from a valid set | ||||
|     """ | ||||
|     def __init__(self, message=None): | ||||
|         self.message = message | ||||
|  | ||||
|     def __call__(self, form, field): | ||||
|         from changedetectionio import notification | ||||
|  | ||||
| @@ -247,6 +246,10 @@ class ValidateJinja2Template(object): | ||||
|         try: | ||||
|             jinja2_env = ImmutableSandboxedEnvironment(loader=BaseLoader) | ||||
|             jinja2_env.globals.update(notification.valid_tokens) | ||||
|             # Extra validation tokens provided on the form_class(... extra_tokens={}) setup | ||||
|             if hasattr(field, 'extra_notification_tokens'): | ||||
|                 jinja2_env.globals.update(field.extra_notification_tokens) | ||||
|  | ||||
|             jinja2_env.from_string(joined_data).render() | ||||
|         except TemplateSyntaxError as e: | ||||
|             raise ValidationError(f"This is not a valid Jinja2 template: {e}") from e | ||||
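ValidateJinja2Template now also registers any extra tokens the form was built with before test-rendering the notification text inside a sandbox. Boiled down, the validation is: create an ImmutableSandboxedEnvironment, add the allowed tokens as globals, attempt a render, and report syntax errors. A compact sketch (token names are examples):

from jinja2 import BaseLoader
from jinja2.exceptions import TemplateSyntaxError
from jinja2.sandbox import ImmutableSandboxedEnvironment

def validate_notification_template(text, valid_tokens, extra_tokens=None):
    env = ImmutableSandboxedEnvironment(loader=BaseLoader())
    env.globals.update(valid_tokens)
    if extra_tokens:
        env.globals.update(extra_tokens)
    try:
        env.from_string(text).render()
    except TemplateSyntaxError as e:
        raise ValueError(f"This is not a valid Jinja2 template: {e}") from e

validate_notification_template("Changed: {{ watch_url }}", {'watch_url': ''}, {'price': ''})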
| @@ -419,15 +422,24 @@ class quickWatchForm(Form): | ||||
|  | ||||
| # Common to a single watch and the global settings | ||||
| class commonSettingsForm(Form): | ||||
|     from . import processors | ||||
|  | ||||
|     notification_urls = StringListField('Notification URL List', validators=[validators.Optional(), ValidateAppRiseServers(), ValidateJinja2Template()]) | ||||
|     notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()]) | ||||
|     def __init__(self, formdata=None, obj=None, prefix="", data=None, meta=None, **kwargs): | ||||
|         super().__init__(formdata, obj, prefix, data, meta, **kwargs) | ||||
|         self.notification_body.extra_notification_tokens = kwargs.get('extra_notification_tokens', {}) | ||||
|         self.notification_title.extra_notification_tokens = kwargs.get('extra_notification_tokens', {}) | ||||
|         self.notification_urls.extra_notification_tokens = kwargs.get('extra_notification_tokens', {}) | ||||
|  | ||||
|     extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False) | ||||
|     fetch_backend = RadioField(u'Fetch Method', choices=content_fetchers.available_fetchers(), validators=[ValidateContentFetcherIsReady()]) | ||||
|     notification_body = TextAreaField('Notification Body', default='{{ watch_url }} had a change.', validators=[validators.Optional(), ValidateJinja2Template()]) | ||||
|     notification_format = SelectField('Notification format', choices=valid_notification_formats.keys()) | ||||
|     fetch_backend = RadioField(u'Fetch Method', choices=content_fetchers.available_fetchers(), validators=[ValidateContentFetcherIsReady()]) | ||||
|     extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False) | ||||
|     webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, | ||||
|                                                                                                                                     message="Should contain one or more seconds")]) | ||||
|     notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()]) | ||||
|     notification_urls = StringListField('Notification URL List', validators=[validators.Optional(), ValidateAppRiseServers(), ValidateJinja2Template()]) | ||||
|     processor = RadioField( label=u"Processor - What do you want to achieve?", choices=processors.available_processors(), default="text_json_diff") | ||||
|     webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, message="Should contain one or more seconds")]) | ||||
|  | ||||
|  | ||||
| class importForm(Form): | ||||
|     from . import processors | ||||
|     processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff") | ||||
| @@ -447,7 +459,7 @@ class SingleBrowserStep(Form): | ||||
| #    remove_button = SubmitField('-', render_kw={"type": "button", "class": "pure-button pure-button-primary", 'title': 'Remove'}) | ||||
| #    add_button = SubmitField('+', render_kw={"type": "button", "class": "pure-button pure-button-primary", 'title': 'Add new step after'}) | ||||
|  | ||||
| class watchForm(commonSettingsForm): | ||||
| class processor_text_json_diff_form(commonSettingsForm): | ||||
|  | ||||
|     url = fields.URLField('URL', validators=[validateURL()]) | ||||
|     tags = StringTagUUID('Group tag', [validators.Optional()], default='') | ||||
| @@ -457,7 +469,7 @@ class watchForm(commonSettingsForm): | ||||
|  | ||||
|     include_filters = StringListField('CSS/JSONPath/JQ/XPath Filters', [ValidateCSSJSONXPATHInput()], default='') | ||||
|  | ||||
|     subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)]) | ||||
|     subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_json=False)]) | ||||
|  | ||||
|     extract_text = StringListField('Extract text', [ValidateListRegex()]) | ||||
|  | ||||
| @@ -468,16 +480,15 @@ class watchForm(commonSettingsForm): | ||||
|     body = TextAreaField('Request body', [validators.Optional()]) | ||||
|     method = SelectField('Request method', choices=valid_method, default=default_method) | ||||
|     ignore_status_codes = BooleanField('Ignore status codes (process non-2xx status codes as normal)', default=False) | ||||
|     check_unique_lines = BooleanField('Only trigger when unique lines appear', default=False) | ||||
|     check_unique_lines = BooleanField('Only trigger when unique lines appear in all history', default=False) | ||||
|     remove_duplicate_lines = BooleanField('Remove duplicate lines of text', default=False) | ||||
|     sort_text_alphabetically =  BooleanField('Sort text alphabetically', default=False) | ||||
|     trim_text_whitespace = BooleanField('Trim whitespace before and after text', default=False) | ||||
|  | ||||
|     filter_text_added = BooleanField('Added lines', default=True) | ||||
|     filter_text_replaced = BooleanField('Replaced/changed lines', default=True) | ||||
|     filter_text_removed = BooleanField('Removed lines', default=True) | ||||
|  | ||||
|     # @todo this class could be moved to its own text_json_diff_watchForm and this goes to restock_diff_Watchform perhaps | ||||
|     in_stock_only = BooleanField('Only trigger when product goes BACK to in-stock', default=True) | ||||
|  | ||||
|     trigger_text = StringListField('Trigger/wait for text', [validators.Optional(), ValidateListRegex()]) | ||||
|     if os.getenv("PLAYWRIGHT_DRIVER_URL"): | ||||
|         browser_steps = FieldList(FormField(SingleBrowserStep), min_entries=10) | ||||
| @@ -493,6 +504,12 @@ class watchForm(commonSettingsForm): | ||||
|     notification_muted = BooleanField('Notifications Muted / Off', default=False) | ||||
|     notification_screenshot = BooleanField('Attach screenshot to notification (where possible)', default=False) | ||||
|  | ||||
|     def extra_tab_content(self): | ||||
|         return None | ||||
|  | ||||
|     def extra_form_content(self): | ||||
|         return None | ||||
|  | ||||
|     def validate(self, **kwargs): | ||||
|         if not super().validate(): | ||||
|             return False | ||||
| @@ -513,7 +530,6 @@ class watchForm(commonSettingsForm): | ||||
|             result = False | ||||
|         return result | ||||
|  | ||||
|  | ||||
| class SingleExtraProxy(Form): | ||||
|  | ||||
|     # maybe better to set some <script>var.. | ||||
| @@ -562,7 +578,7 @@ class globalSettingsApplicationForm(commonSettingsForm): | ||||
|     empty_pages_are_a_change =  BooleanField('Treat empty pages as a change?', default=False) | ||||
|     fetch_backend = RadioField('Fetch Method', default="html_requests", choices=content_fetchers.available_fetchers(), validators=[ValidateContentFetcherIsReady()]) | ||||
|     global_ignore_text = StringListField('Ignore Text', [ValidateListRegex()]) | ||||
|     global_subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)]) | ||||
|     global_subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_json=False)]) | ||||
|     ignore_whitespace = BooleanField('Ignore whitespace') | ||||
|     password = SaltyPasswordField() | ||||
|     pager_size = IntegerField('Pager size', | ||||
| @@ -584,6 +600,11 @@ class globalSettingsForm(Form): | ||||
|     # Define these as FormFields/"sub forms", this way it matches the JSON storage | ||||
|     # datastore.data['settings']['application'].. | ||||
|     # datastore.data['settings']['requests'].. | ||||
|     def __init__(self, formdata=None, obj=None, prefix="", data=None, meta=None, **kwargs): | ||||
|         super().__init__(formdata, obj, prefix, data, meta, **kwargs) | ||||
|         self.application.notification_body.extra_notification_tokens = kwargs.get('extra_notification_tokens', {}) | ||||
|         self.application.notification_title.extra_notification_tokens = kwargs.get('extra_notification_tokens', {}) | ||||
|         self.application.notification_urls.extra_notification_tokens = kwargs.get('extra_notification_tokens', {}) | ||||
|  | ||||
|     requests = FormField(globalSettingsRequestForm) | ||||
|     application = FormField(globalSettingsApplicationForm) | ||||
|   | ||||
| @@ -1,12 +1,5 @@ | ||||
|  | ||||
| from bs4 import BeautifulSoup | ||||
| from inscriptis import get_text | ||||
| from jsonpath_ng.ext import parse | ||||
| from typing import List | ||||
| from inscriptis.css_profiles import CSS_PROFILES, HtmlElement | ||||
| from inscriptis.html_properties import Display | ||||
| from inscriptis.model.config import ParserConfig | ||||
| from xml.sax.saxutils import escape as xml_escape | ||||
| from lxml import etree | ||||
| import json | ||||
| import re | ||||
|  | ||||
| @@ -41,6 +34,7 @@ def perl_style_slash_enclosed_regex_to_options(regex): | ||||
|  | ||||
| # Given a CSS Rule, and a blob of HTML, return the blob of HTML that matches | ||||
| def include_filters(include_filters, html_content, append_pretty_line_formatting=False): | ||||
|     from bs4 import BeautifulSoup | ||||
|     soup = BeautifulSoup(html_content, "html.parser") | ||||
|     html_block = "" | ||||
|     r = soup.select(include_filters, separator="") | ||||
| @@ -58,16 +52,32 @@ def include_filters(include_filters, html_content, append_pretty_line_formatting | ||||
|     return html_block | ||||
|  | ||||
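A brief usage sketch (not part of the diff) of the CSS include filter above, assuming this file is changedetectionio/html_tools.py as its imports suggest; the selector and markup are invented:

from changedetectionio import html_tools

sample = '<div class="product"><span id="price">$10.99</span></div>'
# Returns only the HTML fragment(s) matching the CSS selector, concatenated into one blob
blob = html_tools.include_filters(include_filters="#price", html_content=sample)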
| def subtractive_css_selector(css_selector, html_content): | ||||
|     from bs4 import BeautifulSoup | ||||
|     soup = BeautifulSoup(html_content, "html.parser") | ||||
|     for item in soup.select(css_selector): | ||||
|         item.decompose() | ||||
|     return str(soup) | ||||
|  | ||||
| def subtractive_xpath_selector(xpath_selector, html_content):  | ||||
|     html_tree = etree.HTML(html_content) | ||||
|     elements_to_remove = html_tree.xpath(xpath_selector) | ||||
|  | ||||
|     for element in elements_to_remove: | ||||
|         element.getparent().remove(element) | ||||
|  | ||||
|     modified_html = etree.tostring(html_tree, method="html").decode("utf-8") | ||||
|     return modified_html | ||||
|  | ||||
| def element_removal(selectors: List[str], html_content): | ||||
|     """Joins individual filters into one css filter.""" | ||||
|     selector = ",".join(selectors) | ||||
|     return subtractive_css_selector(selector, html_content) | ||||
|     """Removes elements that match a list of CSS or xPath selectors.""" | ||||
|     modified_html = html_content | ||||
|     for selector in selectors: | ||||
|         if selector.startswith(('xpath:', 'xpath1:', '//')): | ||||
|             xpath_selector = selector.removeprefix('xpath:').removeprefix('xpath1:') | ||||
|             modified_html = subtractive_xpath_selector(xpath_selector, modified_html) | ||||
|         else: | ||||
|             modified_html = subtractive_css_selector(selector, modified_html) | ||||
|     return modified_html | ||||
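A hedged sketch of the updated element_removal() dispatching between CSS and XPath removal; the selectors and markup are invented for illustration:

from changedetectionio import html_tools

html = '<div><nav id="menu">skip</nav><p class="ad">advert</p><p>keep me</p></div>'
# '.ad' goes through subtractive_css_selector(); the '//' entry is detected as XPath
# and routed through the lxml-based subtractive_xpath_selector()
cleaned = html_tools.element_removal(['.ad', '//nav[@id="menu"]'], html)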
|  | ||||
| def elementpath_tostring(obj): | ||||
|     """ | ||||
| @@ -183,6 +193,7 @@ def xpath1_filter(xpath_filter, html_content, append_pretty_line_formatting=Fals | ||||
|  | ||||
| # Extract/find element | ||||
| def extract_element(find='title', html_content=''): | ||||
|     from bs4 import BeautifulSoup | ||||
|  | ||||
|     #Re #106, be sure to handle when its not found | ||||
|     element_text = None | ||||
| @@ -196,12 +207,14 @@ def extract_element(find='title', html_content=''): | ||||
|  | ||||
| # | ||||
| def _parse_json(json_data, json_filter): | ||||
|     if 'json:' in json_filter: | ||||
|     from jsonpath_ng.ext import parse | ||||
|  | ||||
|     if json_filter.startswith("json:"): | ||||
|         jsonpath_expression = parse(json_filter.replace('json:', '')) | ||||
|         match = jsonpath_expression.find(json_data) | ||||
|         return _get_stripped_text_from_json_match(match) | ||||
|  | ||||
|     if 'jq:' in json_filter: | ||||
|     if json_filter.startswith("jq:") or json_filter.startswith("jqraw:"): | ||||
|  | ||||
|         try: | ||||
|             import jq | ||||
| @@ -209,10 +222,15 @@ def _parse_json(json_data, json_filter): | ||||
|             # `jq` requires full compilation in windows and so isn't generally available | ||||
|             raise Exception("jq support not found") | ||||
|  | ||||
|         jq_expression = jq.compile(json_filter.replace('jq:', '')) | ||||
|         match = jq_expression.input(json_data).all() | ||||
|         if json_filter.startswith("jq:"): | ||||
|             jq_expression = jq.compile(json_filter.removeprefix("jq:")) | ||||
|             match = jq_expression.input(json_data).all() | ||||
|             return _get_stripped_text_from_json_match(match) | ||||
|  | ||||
|         return _get_stripped_text_from_json_match(match) | ||||
|         if json_filter.startswith("jqraw:"): | ||||
|             jq_expression = jq.compile(json_filter.removeprefix("jqraw:")) | ||||
|             match = jq_expression.input(json_data).all() | ||||
|             return '\n'.join(str(item) for item in match) | ||||
|  | ||||
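A rough sketch of the three filter prefixes handled above, via the public extract_json_as_string() in the same module; the sample content is invented, and the jq: and jqraw: forms need the optional jq package:

from changedetectionio import html_tools

content = '{"offers": {"price": "23.50", "priceCurrency": "EUR"}}'

html_tools.extract_json_as_string(content, 'json:$..price')        # JSONPath via jsonpath_ng
html_tools.extract_json_as_string(content, 'jq:.offers.price')     # jq, stripped-text output
html_tools.extract_json_as_string(content, 'jqraw:.offers.price')  # jq, raw str() of each match joined by newlines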
| def _get_stripped_text_from_json_match(match): | ||||
|     s = [] | ||||
| @@ -239,8 +257,10 @@ def _get_stripped_text_from_json_match(match): | ||||
| # json_filter - ie json:$..price | ||||
| # ensure_is_ldjson_info_type - str "product", optional, "@type == product" (I don't know how to do that as a json selector) | ||||
| def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None): | ||||
|     stripped_text_from_html = False | ||||
|     from bs4 import BeautifulSoup | ||||
|  | ||||
|     stripped_text_from_html = False | ||||
| # https://github.com/dgtlmoon/changedetection.io/pull/2041#issuecomment-1848397161w | ||||
|     # Try to parse/filter out the JSON, if we get some parser error, then maybe it's embedded within HTML tags | ||||
|     try: | ||||
|         stripped_text_from_html = _parse_json(json.loads(content), json_filter) | ||||
| @@ -279,17 +299,19 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None | ||||
|                 if isinstance(json_data, dict): | ||||
|                     # If it has LD JSON 'key' @type, and @type is 'product', and something was found for the search | ||||
|                     # (Some sites have multiple of the same ld+json @type='product', but some have the review part, some have the 'price' part) | ||||
|                     # @type could also be a list (Product, SubType) | ||||
|                     # @type could also be a list although non-standard ("@type": ["Product", "SubType"],) | ||||
|                     # LD_JSON auto-extract also requires some content PLUS the ldjson to be present | ||||
|                     # 1833 - could be either str or dict, should not be anything else | ||||
|                     if json_data.get('@type') and stripped_text_from_html: | ||||
|                         try: | ||||
|                             if json_data.get('@type') == str or json_data.get('@type') == dict: | ||||
|                                 types = [json_data.get('@type')] if isinstance(json_data.get('@type'), str) else json_data.get('@type') | ||||
|                                 if ensure_is_ldjson_info_type.lower() in [x.lower().strip() for x in types]: | ||||
|                                     break | ||||
|                         except: | ||||
|                             continue | ||||
|  | ||||
|                     t = json_data.get('@type') | ||||
|                     if t and stripped_text_from_html: | ||||
|  | ||||
|                         if isinstance(t, str) and t.lower() == ensure_is_ldjson_info_type.lower(): | ||||
|                             break | ||||
|                         # The non-standard part, some have a list | ||||
|                         elif isinstance(t, list): | ||||
|                             if ensure_is_ldjson_info_type.lower() in [x.lower().strip() for x in t]: | ||||
|                                 break | ||||
|  | ||||
|             elif stripped_text_from_html: | ||||
|                 break | ||||
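A small, self-contained illustration of the non-standard list form of @type tolerated by the branch above; the data is invented:

import json

ld_block = '{"@type": ["Product", "SubType"], "offers": {"price": "23.50"}}'
t = json.loads(ld_block).get('@type')
# isinstance(t, list) here, so the case-insensitive membership test applies
assert "product" in [x.lower().strip() for x in t]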
| @@ -347,6 +369,7 @@ def strip_ignore_text(content, wordlist, mode="content"): | ||||
|     return "\n".encode('utf8').join(output) | ||||
|  | ||||
| def cdata_in_document_to_text(html_content: str, render_anchor_tag_content=False) -> str: | ||||
|     from xml.sax.saxutils import escape as xml_escape | ||||
|     pattern = '<!\[CDATA\[(\s*(?:.(?<!\]\]>)\s*)*)\]\]>' | ||||
|     def repl(m): | ||||
|         text = m.group(1) | ||||
| @@ -355,6 +378,9 @@ def cdata_in_document_to_text(html_content: str, render_anchor_tag_content=False | ||||
|     return re.sub(pattern, repl, html_content) | ||||
|  | ||||
| def html_to_text(html_content: str, render_anchor_tag_content=False, is_rss=False) -> str: | ||||
|     from inscriptis import get_text | ||||
|     from inscriptis.model.config import ParserConfig | ||||
|  | ||||
|     """Converts html string to a string with just the text. If ignoring | ||||
|     rendering anchor tag content is enable, anchor tag content are also | ||||
|     included in the text | ||||
| @@ -392,22 +418,23 @@ def html_to_text(html_content: str, render_anchor_tag_content=False, is_rss=Fals | ||||
|  | ||||
| # Does LD+JSON exist with a @type=='product' and a .price set anywhere? | ||||
| def has_ldjson_product_info(content): | ||||
|     pricing_data = '' | ||||
|  | ||||
|     try: | ||||
|         if not 'application/ld+json' in content: | ||||
|             return False | ||||
|  | ||||
|         for filter in LD_JSON_PRODUCT_OFFER_SELECTORS: | ||||
|             pricing_data += extract_json_as_string(content=content, | ||||
|                                                   json_filter=filter, | ||||
|                                                   ensure_is_ldjson_info_type="product") | ||||
|         lc = content.lower() | ||||
|         if 'application/ld+json' in lc and lc.count('"price"') == 1 and '"pricecurrency"' in lc: | ||||
|             return True | ||||
|  | ||||
| #       On some pages this is really terribly expensive when they don't really need it | ||||
| #       (For example you never want price monitoring, but this runs on every watch to suggest it) | ||||
| #        for filter in LD_JSON_PRODUCT_OFFER_SELECTORS: | ||||
| #            pricing_data += extract_json_as_string(content=content, | ||||
| #                                                  json_filter=filter, | ||||
| #                                                  ensure_is_ldjson_info_type="product") | ||||
|     except Exception as e: | ||||
|         # Totally fine | ||||
|         # OK too | ||||
|         return False | ||||
|     x=bool(pricing_data) | ||||
|     return x | ||||
|  | ||||
|     return False | ||||
|  | ||||
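A minimal sketch of page content that the cheaper heuristic above would flag (exactly one "price" key plus a "priceCurrency" key inside an application/ld+json block); the markup is invented:

page = '<script type="application/ld+json">{"@type": "Product", "offers": {"price": "23.50", "priceCurrency": "EUR"}}</script>'
lc = page.lower()
# Mirrors the new check above
print('application/ld+json' in lc and lc.count('"price"') == 1 and '"pricecurrency"' in lc)  # True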
|  | ||||
|  | ||||
| def workarounds_for_obfuscations(content): | ||||
|   | ||||
| @@ -5,6 +5,7 @@ from changedetectionio.notification import ( | ||||
|     default_notification_title, | ||||
| ) | ||||
|  | ||||
| # Equal to or greater than this number of FilterNotFoundInResponse exceptions will trigger a filter-not-found notification | ||||
| _FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT = 6 | ||||
| DEFAULT_SETTINGS_HEADERS_USERAGENT='Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36' | ||||
|  | ||||
|   | ||||
| @@ -1,19 +1,14 @@ | ||||
| from .Watch import base_config | ||||
| import uuid | ||||
|  | ||||
| class model(dict): | ||||
| from changedetectionio.model import watch_base | ||||
|  | ||||
|  | ||||
| class model(watch_base): | ||||
|  | ||||
|     def __init__(self, *arg, **kw): | ||||
|         super(model, self).__init__(*arg, **kw) | ||||
|  | ||||
|         self.update(base_config) | ||||
|  | ||||
|         self['uuid'] = str(uuid.uuid4()) | ||||
|         self['overrides_watch'] = kw.get('default', {}).get('overrides_watch') | ||||
|  | ||||
|         if kw.get('default'): | ||||
|             self.update(kw['default']) | ||||
|             del kw['default'] | ||||
|  | ||||
|  | ||||
|         # Goes at the end so we update the default object with the initialiser | ||||
|         super(model, self).__init__(*arg, **kw) | ||||
|  | ||||
|   | ||||
| @@ -1,10 +1,8 @@ | ||||
| from changedetectionio.strtobool import strtobool | ||||
| from changedetectionio.safe_jinja import render as jinja_render | ||||
|  | ||||
| from . import watch_base | ||||
| import os | ||||
| import re | ||||
| import time | ||||
| import uuid | ||||
| from pathlib import Path | ||||
| from loguru import logger | ||||
|  | ||||
| @@ -15,69 +13,6 @@ SAFE_PROTOCOL_REGEX='^(http|https|ftp|file):' | ||||
| minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 3)) | ||||
| mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7} | ||||
|  | ||||
| from changedetectionio.notification import ( | ||||
|     default_notification_format_for_watch | ||||
| ) | ||||
|  | ||||
| base_config = { | ||||
|     'body': None, | ||||
|     'browser_steps': [], | ||||
|     'browser_steps_last_error_step': None, | ||||
|     'check_unique_lines': False,  # On change-detected, compare against all history if its something new | ||||
|     'check_count': 0, | ||||
|     'date_created': None, | ||||
|     'consecutive_filter_failures': 0,  # Every time the CSS/xPath filter cannot be located, reset when all is fine. | ||||
|     'extract_text': [],  # Extract text by regex after filters | ||||
|     'extract_title_as_title': False, | ||||
|     'fetch_backend': 'system', # plaintext, playwright etc | ||||
|     'fetch_time': 0.0, | ||||
|     'processor': 'text_json_diff', # could be restock_diff or others from .processors | ||||
|     'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')), | ||||
|     'filter_text_added': True, | ||||
|     'filter_text_replaced': True, | ||||
|     'filter_text_removed': True, | ||||
|     'has_ldjson_price_data': None, | ||||
|     'track_ldjson_price_data': None, | ||||
|     'headers': {},  # Extra headers to send | ||||
|     'ignore_text': [],  # List of text to ignore when calculating the comparison checksum | ||||
|     'in_stock' : None, | ||||
|     'in_stock_only' : True, # Only trigger change on going to instock from out-of-stock | ||||
|     'include_filters': [], | ||||
|     'last_checked': 0, | ||||
|     'last_error': False, | ||||
|     'last_viewed': 0,  # history key value of the last viewed via the [diff] link | ||||
|     'method': 'GET', | ||||
|     'notification_alert_count': 0, | ||||
|     # Custom notification content | ||||
|     'notification_body': None, | ||||
|     'notification_format': default_notification_format_for_watch, | ||||
|     'notification_muted': False, | ||||
|     'notification_title': None, | ||||
|     'notification_screenshot': False,  # Include the latest screenshot if available and supported by the apprise URL | ||||
|     'notification_urls': [],  # List of URLs to add to the notification Queue (Usually AppRise) | ||||
|     'paused': False, | ||||
|     'previous_md5': False, | ||||
|     'previous_md5_before_filters': False,  # Used for skipping changedetection entirely | ||||
|     'proxy': None,  # Preferred proxy connection | ||||
|     'remote_server_reply': None, # From 'server' reply header | ||||
|     'sort_text_alphabetically': False, | ||||
|     'subtractive_selectors': [], | ||||
|     'tag': '', # Old system of text name for a tag, to be removed | ||||
|     'tags': [], # list of UUIDs to App.Tags | ||||
|     'text_should_not_be_present': [],  # Text that should not present | ||||
|     # Re #110, so then if this is set to None, we know to use the default value instead | ||||
|     # Requires setting to None on submit if it's the same as the default | ||||
|     # Should be all None by default, so we use the system default in this case. | ||||
|     'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None}, | ||||
|     'time_between_check_use_default': True, | ||||
|     'title': None, | ||||
|     'trigger_text': [],  # List of text or regex to wait for until a change is detected | ||||
|     'url': '', | ||||
|     'uuid': str(uuid.uuid4()), | ||||
|     'webdriver_delay': None, | ||||
|     'webdriver_js_execute_code': None,  # Run before change-detection | ||||
| } | ||||
|  | ||||
|  | ||||
| def is_safe_url(test_url): | ||||
|     # See https://github.com/dgtlmoon/changedetection.io/issues/1358 | ||||
| @@ -94,30 +29,26 @@ def is_safe_url(test_url): | ||||
|  | ||||
|     return True | ||||
|  | ||||
| class model(dict): | ||||
|  | ||||
| class model(watch_base): | ||||
|     __newest_history_key = None | ||||
|     __history_n = 0 | ||||
|     jitter_seconds = 0 | ||||
|  | ||||
|     def __init__(self, *arg, **kw): | ||||
|  | ||||
|         self.update(base_config) | ||||
|         self.__datastore_path = kw['datastore_path'] | ||||
|  | ||||
|         self['uuid'] = str(uuid.uuid4()) | ||||
|  | ||||
|         del kw['datastore_path'] | ||||
|  | ||||
|         super(model, self).__init__(*arg, **kw) | ||||
|         if kw.get('default'): | ||||
|             self.update(kw['default']) | ||||
|             del kw['default'] | ||||
|  | ||||
|         if self.get('default'): | ||||
|             del self['default'] | ||||
|  | ||||
|         # Be sure the cached timestamp is ready | ||||
|         bump = self.history | ||||
|  | ||||
|         # Goes at the end so we update the default object with the initialiser | ||||
|         super(model, self).__init__(*arg, **kw) | ||||
|  | ||||
|     @property | ||||
|     def viewed(self): | ||||
|         # Don't return viewed when last_viewed is 0 and newest_key is 0 | ||||
| @@ -157,6 +88,33 @@ class model(dict): | ||||
|             ready_url=ready_url.replace('source:', '') | ||||
|         return ready_url | ||||
|  | ||||
|     def clear_watch(self): | ||||
|         import pathlib | ||||
|  | ||||
|         # JSON Data, Screenshots, Textfiles (history index and snapshots), HTML in the future etc | ||||
|         for item in pathlib.Path(str(self.watch_data_dir)).rglob("*.*"): | ||||
|             os.unlink(item) | ||||
|  | ||||
|         # Force the attr to recalculate | ||||
|         bump = self.history | ||||
|  | ||||
|         # Do this last because it will trigger a recheck due to last_checked being zero | ||||
|         self.update({ | ||||
|             'browser_steps_last_error_step': None, | ||||
|             'check_count': 0, | ||||
|             'fetch_time': 0.0, | ||||
|             'has_ldjson_price_data': None, | ||||
|             'last_checked': 0, | ||||
|             'last_error': False, | ||||
|             'last_notification_error': False, | ||||
|             'last_viewed': 0, | ||||
|             'previous_md5': False, | ||||
|             'previous_md5_before_filters': False, | ||||
|             'remote_server_reply': None, | ||||
|             'track_ldjson_price_data': None | ||||
|         }) | ||||
|         return | ||||
|  | ||||
|     @property | ||||
|     def is_source_type_url(self): | ||||
|         return self.get('url', '').startswith('source:') | ||||
| @@ -238,6 +196,8 @@ class model(dict): | ||||
|  | ||||
|         if len(tmp_history): | ||||
|             self.__newest_history_key = list(tmp_history.keys())[-1] | ||||
|         else: | ||||
|             self.__newest_history_key = None | ||||
|  | ||||
|         self.__history_n = len(tmp_history) | ||||
|  | ||||
| @@ -256,6 +216,13 @@ class model(dict): | ||||
|  | ||||
|         return has_browser_steps | ||||
|  | ||||
|     @property | ||||
|     def has_restock_info(self): | ||||
|         if self.get('restock') and self['restock'].get('in_stock') != None: | ||||
|                 return True | ||||
|  | ||||
|         return False | ||||
|  | ||||
|     # Returns the newest key, but if theres only 1 record, then it's counted as not being new, so return 0. | ||||
|     @property | ||||
|     def newest_history_key(self): | ||||
| @@ -328,14 +295,9 @@ class model(dict): | ||||
|     def save_history_text(self, contents, timestamp, snapshot_id): | ||||
|         import brotli | ||||
|  | ||||
|         self.ensure_data_dir_exists() | ||||
|         logger.trace(f"{self.get('uuid')} - Updating history.txt with timestamp {timestamp}") | ||||
|  | ||||
|         # Small hack so that we sleep just enough to allow 1 second  between history snapshots | ||||
|         # this is because history.txt indexes/keys snapshots by epoch seconds and we dont want dupe keys | ||||
|         if self.__newest_history_key and int(timestamp) == int(self.__newest_history_key): | ||||
|             logger.warning(f"Timestamp {timestamp} already exists, waiting 1 seconds so we have a unique key in history.txt") | ||||
|             timestamp = str(int(timestamp) + 1) | ||||
|             time.sleep(1) | ||||
|         self.ensure_data_dir_exists() | ||||
|  | ||||
|         threshold = int(os.getenv('SNAPSHOT_BROTLI_COMPRESSION_THRESHOLD', 1024)) | ||||
|         skip_brotli = strtobool(os.getenv('DISABLE_BROTLI_TEXT_SNAPSHOT', 'False')) | ||||
| @@ -470,6 +432,17 @@ class model(dict): | ||||
|     def toggle_mute(self): | ||||
|         self['notification_muted'] ^= True | ||||
|  | ||||
|     def extra_notification_token_values(self): | ||||
|         # Used for providing extra tokens | ||||
|         # return {'widget': 555} | ||||
|         return {} | ||||
|  | ||||
|     def extra_notification_token_placeholder_info(self): | ||||
|         # Used for providing extra tokens | ||||
|         # return [('widget', "Get widget amounts")] | ||||
|         return [] | ||||
|  | ||||
|  | ||||
|     def extract_regex_from_all_history(self, regex): | ||||
|         import csv | ||||
|         import re | ||||
| @@ -528,8 +501,42 @@ class model(dict): | ||||
|         # None is set | ||||
|         return False | ||||
|  | ||||
|     def save_error_text(self, contents): | ||||
|         self.ensure_data_dir_exists() | ||||
|         target_path = os.path.join(self.watch_data_dir, "last-error.txt") | ||||
|         with open(target_path, 'w') as f: | ||||
|             f.write(contents) | ||||
|  | ||||
|     def get_last_fetched_before_filters(self): | ||||
|     def save_xpath_data(self, data, as_error=False): | ||||
|         import json | ||||
|  | ||||
|         if as_error: | ||||
|             target_path = os.path.join(self.watch_data_dir, "elements-error.json") | ||||
|         else: | ||||
|             target_path = os.path.join(self.watch_data_dir, "elements.json") | ||||
|  | ||||
|         self.ensure_data_dir_exists() | ||||
|  | ||||
|         with open(target_path, 'w') as f: | ||||
|             f.write(json.dumps(data)) | ||||
|             f.close() | ||||
|  | ||||
|     # Save as PNG, PNG is larger but better for doing visual diff in the future | ||||
|     def save_screenshot(self, screenshot: bytes, as_error=False): | ||||
|  | ||||
|         if as_error: | ||||
|             target_path = os.path.join(self.watch_data_dir, "last-error-screenshot.png") | ||||
|         else: | ||||
|             target_path = os.path.join(self.watch_data_dir, "last-screenshot.png") | ||||
|  | ||||
|         self.ensure_data_dir_exists() | ||||
|  | ||||
|         with open(target_path, 'wb') as f: | ||||
|             f.write(screenshot) | ||||
|             f.close() | ||||
|  | ||||
|  | ||||
|     def get_last_fetched_text_before_filters(self): | ||||
|         import brotli | ||||
|         filepath = os.path.join(self.watch_data_dir, 'last-fetched.br') | ||||
|  | ||||
| @@ -544,12 +551,56 @@ class model(dict): | ||||
|         with open(filepath, 'rb') as f: | ||||
|             return(brotli.decompress(f.read()).decode('utf-8')) | ||||
|  | ||||
|     def save_last_fetched_before_filters(self, contents): | ||||
|     def save_last_text_fetched_before_filters(self, contents): | ||||
|         import brotli | ||||
|         filepath = os.path.join(self.watch_data_dir, 'last-fetched.br') | ||||
|         with open(filepath, 'wb') as f: | ||||
|             f.write(brotli.compress(contents, mode=brotli.MODE_TEXT)) | ||||
|  | ||||
|     def save_last_fetched_html(self, timestamp, contents): | ||||
|         import brotli | ||||
|  | ||||
|         self.ensure_data_dir_exists() | ||||
|         snapshot_fname = f"{timestamp}.html.br" | ||||
|         filepath = os.path.join(self.watch_data_dir, snapshot_fname) | ||||
|  | ||||
|         with open(filepath, 'wb') as f: | ||||
|             contents = contents.encode('utf-8') if isinstance(contents, str) else contents | ||||
|             try: | ||||
|                 f.write(brotli.compress(contents)) | ||||
|             except Exception as e: | ||||
|                 logger.warning(f"{self.get('uuid')} - Unable to compress snapshot, saving as raw data to {filepath}") | ||||
|                 logger.warning(e) | ||||
|                 f.write(contents) | ||||
|  | ||||
|         self._prune_last_fetched_html_snapshots() | ||||
|  | ||||
|     def get_fetched_html(self, timestamp): | ||||
|         import brotli | ||||
|  | ||||
|         snapshot_fname = f"{timestamp}.html.br" | ||||
|         filepath = os.path.join(self.watch_data_dir, snapshot_fname) | ||||
|         if os.path.isfile(filepath): | ||||
|             with open(filepath, 'rb') as f: | ||||
|                 return (brotli.decompress(f.read()).decode('utf-8')) | ||||
|  | ||||
|         return False | ||||
|  | ||||
|  | ||||
|     def _prune_last_fetched_html_snapshots(self): | ||||
|  | ||||
|         dates = list(self.history.keys()) | ||||
|         dates.reverse() | ||||
|  | ||||
|         for index, timestamp in enumerate(dates): | ||||
|             snapshot_fname = f"{timestamp}.html.br" | ||||
|             filepath = os.path.join(self.watch_data_dir, snapshot_fname) | ||||
|  | ||||
|             # Keep only the first 2 | ||||
|             if index > 1 and os.path.isfile(filepath): | ||||
|                 os.remove(filepath) | ||||
|  | ||||
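A hedged round-trip sketch of the new HTML snapshot helpers above, where `watch` is assumed to be an existing changedetectionio.model.Watch.model instance; only the two most recent .html.br snapshots survive the pruning step:

import time

ts = str(int(time.time()))
watch.save_last_fetched_html(timestamp=ts, contents="<html><body>hello</body></html>")
html = watch.get_fetched_html(ts)   # brotli-decompressed string, or False if missing/pruned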
|  | ||||
|     @property | ||||
|     def get_browsersteps_available_screenshots(self): | ||||
|         "For knowing which screenshots are available to show the user in BrowserSteps UI" | ||||
|   | ||||
| @@ -0,0 +1,76 @@ | ||||
| import os | ||||
| import uuid | ||||
|  | ||||
| from changedetectionio import strtobool | ||||
| from changedetectionio.notification import default_notification_format_for_watch | ||||
|  | ||||
| class watch_base(dict): | ||||
|  | ||||
|     def __init__(self, *arg, **kw): | ||||
|         self.update({ | ||||
|             # Custom notification content | ||||
|             # Re #110, so then if this is set to None, we know to use the default value instead | ||||
|             # Requires setting to None on submit if it's the same as the default | ||||
|             # Should be all None by default, so we use the system default in this case. | ||||
|             'body': None, | ||||
|             'browser_steps': [], | ||||
|             'browser_steps_last_error_step': None, | ||||
|             'check_count': 0, | ||||
|             'check_unique_lines': False,  # On change-detected, compare against all history if its something new | ||||
|             'consecutive_filter_failures': 0,  # Every time the CSS/xPath filter cannot be located, reset when all is fine. | ||||
|             'content-type': None, | ||||
|             'date_created': None, | ||||
|             'extract_text': [],  # Extract text by regex after filters | ||||
|             'extract_title_as_title': False, | ||||
|             'fetch_backend': 'system',  # plaintext, playwright etc | ||||
|             'fetch_time': 0.0, | ||||
|             'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')), | ||||
|             'filter_text_added': True, | ||||
|             'filter_text_removed': True, | ||||
|             'filter_text_replaced': True, | ||||
|             'follow_price_changes': True, | ||||
|             'has_ldjson_price_data': None, | ||||
|             'headers': {},  # Extra headers to send | ||||
|             'ignore_text': [],  # List of text to ignore when calculating the comparison checksum | ||||
|             'in_stock_only': True,  # Only trigger change on going to instock from out-of-stock | ||||
|             'include_filters': [], | ||||
|             'last_checked': 0, | ||||
|             'last_error': False, | ||||
|             'last_viewed': 0,  # history key value of the last viewed via the [diff] link | ||||
|             'method': 'GET', | ||||
|             'notification_alert_count': 0, | ||||
|             'notification_body': None, | ||||
|             'notification_format': default_notification_format_for_watch, | ||||
|             'notification_muted': False, | ||||
|             'notification_screenshot': False,  # Include the latest screenshot if available and supported by the apprise URL | ||||
|             'notification_title': None, | ||||
|             'notification_urls': [],  # List of URLs to add to the notification Queue (Usually AppRise) | ||||
|             'paused': False, | ||||
|             'previous_md5': False, | ||||
|             'previous_md5_before_filters': False,  # Used for skipping changedetection entirely | ||||
|             'processor': 'text_json_diff',  # could be restock_diff or others from .processors | ||||
|             'price_change_threshold_percent': None, | ||||
|             'proxy': None,  # Preferred proxy connection | ||||
|             'remote_server_reply': None,  # From 'server' reply header | ||||
|             'sort_text_alphabetically': False, | ||||
|             'subtractive_selectors': [], | ||||
|             'tag': '',  # Old system of text name for a tag, to be removed | ||||
|             'tags': [],  # list of UUIDs to App.Tags | ||||
|             'text_should_not_be_present': [],  # Text that should not be present | ||||
|             'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None}, | ||||
|             'time_between_check_use_default': True, | ||||
|             'title': None, | ||||
|             'track_ldjson_price_data': None, | ||||
|             'trim_text_whitespace': False, | ||||
|             'remove_duplicate_lines': False, | ||||
|             'trigger_text': [],  # List of text or regex to wait for until a change is detected | ||||
|             'url': '', | ||||
|             'uuid': str(uuid.uuid4()), | ||||
|             'webdriver_delay': None, | ||||
|             'webdriver_js_execute_code': None,  # Run before change-detection | ||||
|         }) | ||||
|  | ||||
|         super(watch_base, self).__init__(*arg, **kw) | ||||
|  | ||||
|         if self.get('default'): | ||||
|             del self['default'] | ||||
| @@ -1,9 +1,10 @@ | ||||
| import apprise | ||||
|  | ||||
| import time | ||||
| from apprise import NotifyFormat | ||||
| import json | ||||
| import apprise | ||||
| from loguru import logger | ||||
|  | ||||
|  | ||||
| valid_tokens = { | ||||
|     'base_url': '', | ||||
|     'current_snapshot': '', | ||||
| @@ -34,86 +35,11 @@ valid_notification_formats = { | ||||
|     default_notification_format_for_watch: default_notification_format_for_watch | ||||
| } | ||||
|  | ||||
| # include the decorator | ||||
| from apprise.decorators import notify | ||||
|  | ||||
| @notify(on="delete") | ||||
| @notify(on="deletes") | ||||
| @notify(on="get") | ||||
| @notify(on="gets") | ||||
| @notify(on="post") | ||||
| @notify(on="posts") | ||||
| @notify(on="put") | ||||
| @notify(on="puts") | ||||
| def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs): | ||||
|     import requests | ||||
|     from apprise.utils import parse_url as apprise_parse_url | ||||
|     from apprise import URLBase | ||||
|  | ||||
|     url = kwargs['meta'].get('url') | ||||
|  | ||||
|     if url.startswith('post'): | ||||
|         r = requests.post | ||||
|     elif url.startswith('get'): | ||||
|         r = requests.get | ||||
|     elif url.startswith('put'): | ||||
|         r = requests.put | ||||
|     elif url.startswith('delete'): | ||||
|         r = requests.delete | ||||
|  | ||||
|     url = url.replace('post://', 'http://') | ||||
|     url = url.replace('posts://', 'https://') | ||||
|     url = url.replace('put://', 'http://') | ||||
|     url = url.replace('puts://', 'https://') | ||||
|     url = url.replace('get://', 'http://') | ||||
|     url = url.replace('gets://', 'https://') | ||||
|     url = url.replace('put://', 'http://') | ||||
|     url = url.replace('puts://', 'https://') | ||||
|     url = url.replace('delete://', 'http://') | ||||
|     url = url.replace('deletes://', 'https://') | ||||
|  | ||||
|     headers = {} | ||||
|     params = {} | ||||
|     auth = None | ||||
|  | ||||
|     # Convert /foobar?+some-header=hello to proper header dictionary | ||||
|     results = apprise_parse_url(url) | ||||
|     if results: | ||||
|         # Add our headers that the user can potentially over-ride if they wish | ||||
|         # to to our returned result set and tidy entries by unquoting them | ||||
|         headers = {URLBase.unquote(x): URLBase.unquote(y) | ||||
|                    for x, y in results['qsd+'].items()} | ||||
|  | ||||
|         # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation | ||||
|         # In Apprise, it relies on prefixing each request arg with "-", because it uses say &method=update as a flag for apprise | ||||
|         # but here we are making straight requests, so we need todo convert this against apprise's logic | ||||
|         for k, v in results['qsd'].items(): | ||||
|             if not k.strip('+-') in results['qsd+'].keys(): | ||||
|                 params[URLBase.unquote(k)] = URLBase.unquote(v) | ||||
|  | ||||
|         # Determine Authentication | ||||
|         auth = '' | ||||
|         if results.get('user') and results.get('password'): | ||||
|             auth = (URLBase.unquote(results.get('user')), URLBase.unquote(results.get('user'))) | ||||
|         elif results.get('user'): | ||||
|             auth = (URLBase.unquote(results.get('user'))) | ||||
|  | ||||
|     # Try to auto-guess if it's JSON | ||||
|     try: | ||||
|         json.loads(body) | ||||
|         headers['Content-Type'] = 'application/json; charset=utf-8' | ||||
|     except ValueError as e: | ||||
|         pass | ||||
|  | ||||
|     r(results.get('url'), | ||||
|       auth=auth, | ||||
|       data=body, | ||||
|       headers=headers, | ||||
|       params=params | ||||
|       ) | ||||
|  | ||||
|  | ||||
| def process_notification(n_object, datastore): | ||||
|     # so that the custom endpoints are registered | ||||
|     from changedetectionio.apprise_plugin import apprise_custom_api_call_wrapper | ||||
|  | ||||
|     from .safe_jinja import render as jinja_render | ||||
|     now = time.time() | ||||
| @@ -157,7 +83,7 @@ def process_notification(n_object, datastore): | ||||
|                 logger.warning(f"Process Notification: skipping empty notification URL.") | ||||
|                 continue | ||||
|  | ||||
|             logger.info(">> Process Notification: AppRise notifying {}".format(url)) | ||||
|             logger.info(f">> Process Notification: AppRise notifying {url}") | ||||
|             url = jinja_render(template_str=url, **notification_parameters) | ||||
|  | ||||
|             # Re 323 - Limit discord length to their 2000 char limit total or it wont send. | ||||
| @@ -230,6 +156,7 @@ def process_notification(n_object, datastore): | ||||
|         log_value = logs.getvalue() | ||||
|  | ||||
|         if log_value and 'WARNING' in log_value or 'ERROR' in log_value: | ||||
|             logger.critical(log_value) | ||||
|             raise Exception(log_value) | ||||
|  | ||||
|     # Return what was sent for better logging - after the for loop | ||||
| @@ -272,19 +199,18 @@ def create_notification_parameters(n_object, datastore): | ||||
|     tokens.update( | ||||
|         { | ||||
|             'base_url': base_url, | ||||
|             'current_snapshot': n_object.get('current_snapshot', ''), | ||||
|             'diff': n_object.get('diff', ''),  # Null default in the case we use a test | ||||
|             'diff_added': n_object.get('diff_added', ''),  # Null default in the case we use a test | ||||
|             'diff_full': n_object.get('diff_full', ''),  # Null default in the case we use a test | ||||
|             'diff_patch': n_object.get('diff_patch', ''),  # Null default in the case we use a test | ||||
|             'diff_removed': n_object.get('diff_removed', ''),  # Null default in the case we use a test | ||||
|             'diff_url': diff_url, | ||||
|             'preview_url': preview_url, | ||||
|             'triggered_text': n_object.get('triggered_text', ''), | ||||
|             'watch_tag': watch_tag if watch_tag is not None else '', | ||||
|             'watch_title': watch_title if watch_title is not None else '', | ||||
|             'watch_url': watch_url, | ||||
|             'watch_uuid': uuid, | ||||
|         }) | ||||
|  | ||||
|     # n_object will contain diff, diff_added etc etc | ||||
|     tokens.update(n_object) | ||||
|  | ||||
|     if uuid: | ||||
|         tokens.update(datastore.data['watching'].get(uuid).extra_notification_token_values()) | ||||
|  | ||||
|     return tokens | ||||
|   | ||||
| @@ -8,4 +8,8 @@ The concept here is to be able to switch between different domain specific probl | ||||
| Some suggestions for the future | ||||
|  | ||||
| - `graphical`  | ||||
| - `restock_and_price` - extract price AND stock text | ||||
|  | ||||
| ## Todo | ||||
|  | ||||
| - Make each processor return an extra list of sub-processors (so you could configure a single processor in different ways) | ||||
| - move restock_diff to its own pip/github repo | ||||
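Judging from the restock_diff files added elsewhere in this comparison, a processor sub-package would presumably be laid out roughly like this (a sketch, not a documented contract):

    changedetectionio/processors/<processor_name>/
        __init__.py   # optional processor-specific Watch subclass and defaults (see restock_diff/__init__.py)
        forms.py      # optional settings form exposing extra_tab_content() / extra_form_content()
        processor.py  # perform_site_check(difference_detection_processor) plus name/description strings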
|   | ||||
| @@ -1,10 +1,16 @@ | ||||
| from abc import abstractmethod | ||||
|  | ||||
| from changedetectionio.content_fetchers.base import Fetcher | ||||
| from changedetectionio.strtobool import strtobool | ||||
|  | ||||
| from copy import deepcopy | ||||
| from loguru import logger | ||||
| import hashlib | ||||
| import os | ||||
| import re | ||||
| import importlib | ||||
| import pkgutil | ||||
| import inspect | ||||
|  | ||||
| class difference_detection_processor(): | ||||
|  | ||||
| @@ -19,9 +25,12 @@ class difference_detection_processor(): | ||||
|         super().__init__(*args, **kwargs) | ||||
|         self.datastore = datastore | ||||
|         self.watch = deepcopy(self.datastore.data['watching'].get(watch_uuid)) | ||||
|         # Generic fetcher that should be extended (requests, playwright etc) | ||||
|         self.fetcher = Fetcher() | ||||
|  | ||||
|     def call_browser(self): | ||||
|         from requests.structures import CaseInsensitiveDict | ||||
|  | ||||
|         # Protect against file:// access | ||||
|         if re.search(r'^file://', self.watch.get('url', '').strip(), re.IGNORECASE): | ||||
|             if not strtobool(os.getenv('ALLOW_FILE_URI', 'false')): | ||||
| @@ -129,8 +138,18 @@ class difference_detection_processor(): | ||||
|         is_binary = self.watch.is_pdf | ||||
|  | ||||
|         # And here we go! call the right browser with browser-specific settings | ||||
|         self.fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, self.watch.get('include_filters'), | ||||
|                     is_binary=is_binary) | ||||
|         empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False) | ||||
|  | ||||
|         self.fetcher.run(url=url, | ||||
|                          timeout=timeout, | ||||
|                          request_headers=request_headers, | ||||
|                          request_body=request_body, | ||||
|                          request_method=request_method, | ||||
|                          ignore_status_codes=ignore_status_codes, | ||||
|                          current_include_filters=self.watch.get('include_filters'), | ||||
|                          is_binary=is_binary, | ||||
|                          empty_pages_are_a_change=empty_pages_are_a_change | ||||
|                          ) | ||||
|  | ||||
|         #@todo .quit here could go on close object, so we can run JS if change-detected | ||||
|         self.fetcher.quit() | ||||
| @@ -138,16 +157,91 @@ class difference_detection_processor(): | ||||
|         # After init, call run_changedetection() which will do the actual change-detection | ||||
|  | ||||
|     @abstractmethod | ||||
|     def run_changedetection(self, uuid, skip_when_checksum_same=True): | ||||
|     def run_changedetection(self, watch, skip_when_checksum_same=True): | ||||
|         update_obj = {'last_notification_error': False, 'last_error': False} | ||||
|         some_data = 'xxxxx' | ||||
|         update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest() | ||||
|         changed_detected = False | ||||
|         return changed_detected, update_obj, ''.encode('utf-8') | ||||
|         return changed_detected, update_obj, ''.encode('utf-8'), b'' | ||||
|  | ||||
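A minimal, hypothetical processor sketch matching the updated abstract signature above: it now receives the watch object instead of a uuid and returns a 4-tuple with an extra trailing bytes value (shown as b'' in the stub). The class name and snapshot text are invented:

import hashlib
from changedetectionio.processors import difference_detection_processor

class perform_site_check(difference_detection_processor):
    def run_changedetection(self, watch, skip_when_checksum_same=True):
        update_obj = {'last_notification_error': False, 'last_error': False}
        text = "whatever this processor considers the comparable snapshot"  # placeholder
        update_obj['previous_md5'] = hashlib.md5(text.encode('utf-8')).hexdigest()
        changed_detected = watch.get('previous_md5') != update_obj['previous_md5']
        # (changed?, fields to merge into the watch, snapshot text, extra trailing bytes as in the stub above)
        return changed_detected, update_obj, text.encode('utf-8'), b''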
|  | ||||
| def find_sub_packages(package_name): | ||||
|     """ | ||||
|     Find all sub-packages within the given package. | ||||
|  | ||||
|     :param package_name: The name of the base package to scan for sub-packages. | ||||
|     :return: A list of sub-package names. | ||||
|     """ | ||||
|     package = importlib.import_module(package_name) | ||||
|     return [name for _, name, is_pkg in pkgutil.iter_modules(package.__path__) if is_pkg] | ||||
|  | ||||
|  | ||||
| def find_processors(): | ||||
|     """ | ||||
|     Find all subclasses of difference_detection_processor within the sub-packages of changedetectionio.processors. | ||||
|  | ||||
|     :return: A list of (module, sub_package_name) tuples. | ||||
|     """ | ||||
|     package_name = "changedetectionio.processors"  # Name of the current package/module | ||||
|  | ||||
|     processors = [] | ||||
|     sub_packages = find_sub_packages(package_name) | ||||
|  | ||||
|     for sub_package in sub_packages: | ||||
|         module_name = f"{package_name}.{sub_package}.processor" | ||||
|         try: | ||||
|             module = importlib.import_module(module_name) | ||||
|  | ||||
|             # Iterate through all classes in the module | ||||
|             for name, obj in inspect.getmembers(module, inspect.isclass): | ||||
|                 if issubclass(obj, difference_detection_processor) and obj is not difference_detection_processor: | ||||
|                     processors.append((module, sub_package)) | ||||
|         except (ModuleNotFoundError, ImportError) as e: | ||||
|             logger.warning(f"Failed to import module {module_name}: {e} (find_processors())") | ||||
|  | ||||
|     return processors | ||||
|  | ||||
|  | ||||
| def get_parent_module(module): | ||||
|     module_name = module.__name__ | ||||
|     if '.' not in module_name: | ||||
|         return None  # Top-level module has no parent | ||||
|     parent_module_name = module_name.rsplit('.', 1)[0] | ||||
|     try: | ||||
|         return importlib.import_module(parent_module_name) | ||||
|     except Exception as e: | ||||
|         pass | ||||
|  | ||||
|     return False | ||||
|  | ||||
|  | ||||
|  | ||||
| def get_custom_watch_obj_for_processor(processor_name): | ||||
|     from changedetectionio.model import Watch | ||||
|     watch_class = Watch.model | ||||
|     processor_classes = find_processors() | ||||
|     custom_watch_obj = next((tpl for tpl in processor_classes if tpl[1] == processor_name), None) | ||||
|     if custom_watch_obj: | ||||
|         # Parent of .processor.py COULD have its own Watch implementation | ||||
|         parent_module = get_parent_module(custom_watch_obj[0]) | ||||
|         if hasattr(parent_module, 'Watch'): | ||||
|             watch_class = parent_module.Watch | ||||
|  | ||||
|     return watch_class | ||||
|  | ||||
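A hedged usage sketch of the discovery helpers above. With the restock_diff package added in this comparison (and assuming text_json_diff was converted to a sub-package the same way), the processor-specific Watch class is resolved by sub-package name; the commented return values are what the code implies, not verified output:

from changedetectionio.processors import find_processors, get_custom_watch_obj_for_processor

find_processors()
# -> e.g. [(<module ...restock_diff.processor>, 'restock_diff'), ...] for every sub-package
#    whose processor.py defines a difference_detection_processor subclass

get_custom_watch_obj_for_processor('restock_diff')
# -> changedetectionio.processors.restock_diff.Watch, because that parent module defines Watch

get_custom_watch_obj_for_processor('some_processor_without_a_watch_class')  # invented name
# -> falls back to the default changedetectionio.model.Watch.model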
|  | ||||
| def available_processors(): | ||||
|     from . import restock_diff, text_json_diff | ||||
|     x=[('text_json_diff', text_json_diff.name), ('restock_diff', restock_diff.name)] | ||||
|     # @todo Make this smarter with introspection of sorts. | ||||
|     return x | ||||
|     """ | ||||
|     Get a list of processors by name and description for the UI elements | ||||
|     :return: A list :) | ||||
|     """ | ||||
|  | ||||
|     processor_classes = find_processors() | ||||
|  | ||||
|     available = [] | ||||
|     for package, processor_class in processor_classes: | ||||
|         available.append((processor_class, package.name)) | ||||
|  | ||||
|     return available | ||||
|  | ||||
|   | ||||
							
								
								
									
10 changedetectionio/processors/exceptions.py Normal file
							| @@ -0,0 +1,10 @@ | ||||
| class ProcessorException(Exception): | ||||
|     def __init__(self, message=None, status_code=None, url=None, screenshot=None, has_filters=False, html_content='', xpath_data=None): | ||||
|         self.message = message | ||||
|         self.status_code = status_code | ||||
|         self.url = url | ||||
|         self.screenshot = screenshot | ||||
|         self.has_filters = has_filters | ||||
|         self.html_content = html_content | ||||
|         self.xpath_data = xpath_data | ||||
|         return | ||||
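A brief, hypothetical example of raising the new ProcessorException from inside a processor so the UI layer can surface the screenshot and filter state along with the error; the message is invented and the surrounding variables (watch, self.fetcher) are assumed to be in scope:

from changedetectionio.processors.exceptions import ProcessorException

# Inside a processor's run_changedetection(), roughly:
raise ProcessorException(
    message="Price element not found after filtering",   # invented example message
    url=watch.get('url'),
    status_code=self.fetcher.get_last_status_code(),
    screenshot=self.fetcher.screenshot,
    has_filters=bool(watch.get('include_filters')),
    xpath_data=self.fetcher.xpath_data,
)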
| @@ -1,66 +0,0 @@ | ||||
|  | ||||
| from . import difference_detection_processor | ||||
| from copy import deepcopy | ||||
| from loguru import logger | ||||
| import hashlib | ||||
| import urllib3 | ||||
|  | ||||
| urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) | ||||
|  | ||||
| name = 'Re-stock detection for single product pages' | ||||
| description = 'Detects if the product goes back to in-stock' | ||||
|  | ||||
| class UnableToExtractRestockData(Exception): | ||||
|     def __init__(self, status_code): | ||||
|         # Set this so we can use it in other parts of the app | ||||
|         self.status_code = status_code | ||||
|         return | ||||
|  | ||||
| class perform_site_check(difference_detection_processor): | ||||
|     screenshot = None | ||||
|     xpath_data = None | ||||
|  | ||||
|     def run_changedetection(self, uuid, skip_when_checksum_same=True): | ||||
|  | ||||
|         # DeepCopy so we can be sure we don't accidently change anything by reference | ||||
|         watch = deepcopy(self.datastore.data['watching'].get(uuid)) | ||||
|  | ||||
|         if not watch: | ||||
|             raise Exception("Watch no longer exists.") | ||||
|  | ||||
|         # Unset any existing notification error | ||||
|         update_obj = {'last_notification_error': False, 'last_error': False} | ||||
|  | ||||
|         self.screenshot = self.fetcher.screenshot | ||||
|         self.xpath_data = self.fetcher.xpath_data | ||||
|  | ||||
|         # Track the content type | ||||
|         update_obj['content_type'] = self.fetcher.headers.get('Content-Type', '') | ||||
|         update_obj["last_check_status"] = self.fetcher.get_last_status_code() | ||||
|  | ||||
|         # Main detection method | ||||
|         fetched_md5 = None | ||||
|         if self.fetcher.instock_data: | ||||
|             fetched_md5 = hashlib.md5(self.fetcher.instock_data.encode('utf-8')).hexdigest() | ||||
|             # 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold. | ||||
|             update_obj["in_stock"] = True if self.fetcher.instock_data == 'Possibly in stock' else False | ||||
|             logger.debug(f"Watch UUID {uuid} restock check returned '{self.fetcher.instock_data}' from JS scraper.") | ||||
|         else: | ||||
|             raise UnableToExtractRestockData(status_code=self.fetcher.status_code) | ||||
|  | ||||
|         # The main thing that all this at the moment comes down to :) | ||||
|         changed_detected = False | ||||
|         logger.debug(f"Watch UUID {uuid} restock check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}") | ||||
|  | ||||
|         if watch.get('previous_md5') and watch.get('previous_md5') != fetched_md5: | ||||
|             # Yes if we only care about it going to instock, AND we are in stock | ||||
|             if watch.get('in_stock_only') and update_obj["in_stock"]: | ||||
|                 changed_detected = True | ||||
|  | ||||
|             if not watch.get('in_stock_only'): | ||||
|                 # All cases | ||||
|                 changed_detected = True | ||||
|  | ||||
|         # Always record the new checksum | ||||
|         update_obj["previous_md5"] = fetched_md5 | ||||
|         return changed_detected, update_obj, self.fetcher.instock_data.encode('utf-8').strip() | ||||
							
								
								
									
84 changedetectionio/processors/restock_diff/__init__.py Normal file
							| @@ -0,0 +1,84 @@ | ||||
|  | ||||
| from babel.numbers import parse_decimal | ||||
| from changedetectionio.model.Watch import model as BaseWatch | ||||
| from typing import Union | ||||
| import re | ||||
|  | ||||
| class Restock(dict): | ||||
|  | ||||
|     def parse_currency(self, raw_value: str) -> Union[float, None]: | ||||
|         # Clean and standardize the value (i.e. 1,400.00 should become 1400.00); even better would be to store the whole thing as an integer. | ||||
|         standardized_value = raw_value | ||||
|  | ||||
|         if ',' in standardized_value and '.' in standardized_value: | ||||
|             # Identify the correct decimal separator | ||||
|             if standardized_value.rfind('.') > standardized_value.rfind(','): | ||||
|                 standardized_value = standardized_value.replace(',', '') | ||||
|             else: | ||||
|                 standardized_value = standardized_value.replace('.', '').replace(',', '.') | ||||
|         else: | ||||
|             standardized_value = standardized_value.replace(',', '.') | ||||
|  | ||||
|         # Remove any non-numeric characters except for the decimal point | ||||
|         standardized_value = re.sub(r'[^\d.-]', '', standardized_value) | ||||
|  | ||||
|         if standardized_value: | ||||
|             # Convert to float | ||||
|             return float(parse_decimal(standardized_value, locale='en')) | ||||
|  | ||||
|         return None | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         # Define default values | ||||
|         default_values = { | ||||
|             'in_stock': None, | ||||
|             'price': None, | ||||
|             'currency': None, | ||||
|             'original_price': None | ||||
|         } | ||||
|  | ||||
|         # Initialize the dictionary with default values | ||||
|         super().__init__(default_values) | ||||
|  | ||||
|         # Update with any provided positional arguments (dictionaries) | ||||
|         if args: | ||||
|             if len(args) == 1 and isinstance(args[0], dict): | ||||
|                 self.update(args[0]) | ||||
|             else: | ||||
|                 raise ValueError("Only one positional argument of type 'dict' is allowed") | ||||
|  | ||||
|     def __setitem__(self, key, value): | ||||
|         # Custom logic to handle setting price and original_price | ||||
|         if key == 'price' or key == 'original_price': | ||||
|             if isinstance(value, str): | ||||
|                 value = self.parse_currency(raw_value=value) | ||||
|  | ||||
|         super().__setitem__(key, value) | ||||
|  | ||||
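A minimal sketch of the normalisation Restock.__setitem__ applies to price-like keys; the values are chosen to show both European and US style separators:

r = Restock()
r['price'] = "1.234,56"           # European separators -> stored as 1234.56
r['original_price'] = "1,400.00"  # US separators       -> stored as 1400.0
r['in_stock'] = True              # non-price keys are stored unchanged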
| class Watch(BaseWatch): | ||||
|     def __init__(self, *arg, **kw): | ||||
|         super().__init__(*arg, **kw) | ||||
|         self['restock'] = Restock(kw['default']['restock']) if kw.get('default') and kw['default'].get('restock') else Restock() | ||||
|  | ||||
|         self['restock_settings'] = kw['default']['restock_settings'] if kw.get('default',{}).get('restock_settings') else { | ||||
|             'follow_price_changes': True, | ||||
|             'in_stock_processing' : 'in_stock_only' | ||||
|         } #@todo update | ||||
|  | ||||
|     def clear_watch(self): | ||||
|         super().clear_watch() | ||||
|         self.update({'restock': Restock()}) | ||||
|  | ||||
|     def extra_notification_token_values(self): | ||||
|         values = super().extra_notification_token_values() | ||||
|         values['restock'] = self.get('restock', {}) | ||||
|         return values | ||||
|  | ||||
|     def extra_notification_token_placeholder_info(self): | ||||
|         values = super().extra_notification_token_placeholder_info() | ||||
|  | ||||
|         values.append(('restock.price', "Price detected")) | ||||
|         values.append(('restock.original_price', "Original price at first check")) | ||||
|  | ||||
|         return values | ||||
|  | ||||
							
								
								
									
81 changedetectionio/processors/restock_diff/forms.py Normal file
							| @@ -0,0 +1,81 @@ | ||||
| from wtforms import ( | ||||
|     BooleanField, | ||||
|     validators, | ||||
|     FloatField | ||||
| ) | ||||
| from wtforms.fields.choices import RadioField | ||||
| from wtforms.fields.form import FormField | ||||
| from wtforms.form import Form | ||||
|  | ||||
| from changedetectionio.forms import processor_text_json_diff_form | ||||
|  | ||||
|  | ||||
| class RestockSettingsForm(Form): | ||||
|     in_stock_processing = RadioField(label='Re-stock detection', choices=[ | ||||
|         ('in_stock_only', "In Stock only (Out Of Stock -> In Stock only)"), | ||||
|         ('all_changes', "Any availability changes"), | ||||
|         ('off', "Off, don't follow availability/restock"), | ||||
|     ], default="in_stock_only") | ||||
|  | ||||
|     price_change_min = FloatField('Below price to trigger notification', [validators.Optional()], | ||||
|                                   render_kw={"placeholder": "No limit", "size": "10"}) | ||||
|     price_change_max = FloatField('Above price to trigger notification', [validators.Optional()], | ||||
|                                   render_kw={"placeholder": "No limit", "size": "10"}) | ||||
|     price_change_threshold_percent = FloatField('Threshold in % for price changes since the original price', validators=[ | ||||
|  | ||||
|         validators.Optional(), | ||||
|         validators.NumberRange(min=0, max=100, message="Should be between 0 and 100"), | ||||
|     ], render_kw={"placeholder": "0%", "size": "5"}) | ||||
|  | ||||
|     follow_price_changes = BooleanField('Follow price changes', default=True) | ||||
|  | ||||
| class processor_settings_form(processor_text_json_diff_form): | ||||
|     restock_settings = FormField(RestockSettingsForm) | ||||
|  | ||||
|     def extra_tab_content(self): | ||||
|         return 'Restock & Price Detection' | ||||
|  | ||||
|     def extra_form_content(self): | ||||
|         output = "" | ||||
|  | ||||
|         if getattr(self, 'watch', None) and getattr(self, 'datastore'): | ||||
|             for tag_uuid in self.watch.get('tags'): | ||||
|                 tag = self.datastore.data['settings']['application']['tags'].get(tag_uuid, {}) | ||||
|                 if tag.get('overrides_watch'): | ||||
|                     # @todo - Quick and dirty, can't access 'url_for' here because it's out of scope somehow | ||||
|                     output = f"""<p><strong>Note! A Group tag overrides the restock and price detection here.</strong></p><style>#restock-fieldset-price-group {{ opacity: 0.6; }}</style>""" | ||||
|  | ||||
|         output += """ | ||||
|         {% from '_helpers.html' import render_field, render_checkbox_field, render_button %} | ||||
|         <script>         | ||||
|             $(document).ready(function () { | ||||
|                 toggleOpacity('#restock_settings-follow_price_changes', '.price-change-minmax', true); | ||||
|             }); | ||||
|         </script> | ||||
|  | ||||
|         <fieldset id="restock-fieldset-price-group"> | ||||
|             <div class="pure-control-group"> | ||||
|                 <fieldset class="pure-group inline-radio"> | ||||
|                     {{ render_field(form.restock_settings.in_stock_processing) }} | ||||
|                 </fieldset> | ||||
|                 <fieldset class="pure-group"> | ||||
|                     {{ render_checkbox_field(form.restock_settings.follow_price_changes) }} | ||||
|                     <span class="pure-form-message-inline">Changes in price should trigger a notification</span> | ||||
|                 </fieldset> | ||||
|                 <fieldset class="pure-group price-change-minmax">                | ||||
|                     {{ render_field(form.restock_settings.price_change_min, placeholder=watch.get('restock', {}).get('price')) }} | ||||
|                     <span class="pure-form-message-inline">Minimum amount; trigger a change/notification when the price drops <i>below</i> this value.</span> | ||||
|                 </fieldset> | ||||
|                 <fieldset class="pure-group price-change-minmax"> | ||||
|                     {{ render_field(form.restock_settings.price_change_max, placeholder=watch.get('restock', {}).get('price')) }} | ||||
|                     <span class="pure-form-message-inline">Maximum amount; trigger a change/notification when the price rises <i>above</i> this value.</span> | ||||
|                 </fieldset> | ||||
|                 <fieldset class="pure-group price-change-minmax"> | ||||
|                     {{ render_field(form.restock_settings.price_change_threshold_percent) }} | ||||
|                     <span class="pure-form-message-inline">Price must change more than this % to trigger a change since the first check.</span><br> | ||||
|                     <span class="pure-form-message-inline">For example, if the product is $1,000 USD originally, <strong>2%</strong> would mean it has to change by more than $20 since the first check.</span><br> | ||||
|                 </fieldset>                 | ||||
|             </div> | ||||
|         </fieldset> | ||||
|         """ | ||||
|         return output | ||||
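Because restock_settings is nested into the parent form via FormField, WTForms prefixes the enclosed field names with the attribute name and its default '-' separator, which is why the inline script above can address the checkbox as '#restock_settings-follow_price_changes'. A minimal standalone sketch of that naming behaviour (the parent class here is a hypothetical stand-in for processor_settings_form, not the project's code):

    from wtforms import Form, BooleanField, FormField

    class RestockSettingsForm(Form):
        follow_price_changes = BooleanField('Follow price changes', default=True)

    class WatchEditForm(Form):  # hypothetical stand-in for processor_settings_form
        restock_settings = FormField(RestockSettingsForm)

    form = WatchEditForm()
    # The enclosed field is named attribute-name + '-' + field-name, which matches
    # the '#restock_settings-follow_price_changes' selector used in the template.
    print(form.restock_settings.follow_price_changes.name)
    # -> restock_settings-follow_price_changes
    print(form.restock_settings.follow_price_changes())
    # -> <input checked id="restock_settings-follow_price_changes" ... type="checkbox">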
							
								
								
									
changedetectionio/processors/restock_diff/processor.py  (new file, 301 lines)
							| @@ -0,0 +1,301 @@ | ||||
| from .. import difference_detection_processor | ||||
| from ..exceptions import ProcessorException | ||||
| from . import Restock | ||||
| from loguru import logger | ||||
|  | ||||
| import urllib3 | ||||
| import time | ||||
|  | ||||
| urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) | ||||
| name = 'Re-stock & Price detection for single product pages' | ||||
| description = 'Detects if the product goes back to in-stock' | ||||
|  | ||||
| class UnableToExtractRestockData(Exception): | ||||
|     def __init__(self, status_code): | ||||
|         # Set this so we can use it in other parts of the app | ||||
|         self.status_code = status_code | ||||
|         return | ||||
|  | ||||
| class MoreThanOnePriceFound(Exception): | ||||
|     def __init__(self): | ||||
|         return | ||||
|  | ||||
| def _search_prop_by_value(matches, value): | ||||
|     for properties in matches: | ||||
|         for prop in properties: | ||||
|             if value in prop[0]: | ||||
|                 return prop[1]  # Yield the desired value and exit the function | ||||
|  | ||||
| def _deduplicate_prices(data): | ||||
|     seen = set() | ||||
|     unique_data = [] | ||||
|  | ||||
|     for datum in data: | ||||
|         # Convert 'value' to float if it can be a numeric string, otherwise leave it as is | ||||
|         try: | ||||
|             normalized_value = float(datum.value) if isinstance(datum.value, str) and datum.value.replace('.', '', 1).isdigit() else datum.value | ||||
|         except ValueError: | ||||
|             normalized_value = datum.value | ||||
|  | ||||
|         # If the normalized value hasn't been seen yet, add it to unique data | ||||
|         if normalized_value not in seen: | ||||
|             unique_data.append(datum) | ||||
|             seen.add(normalized_value) | ||||
|      | ||||
|     return unique_data | ||||
|  | ||||
|  | ||||
| # should return Restock() | ||||
| # add casting? | ||||
| def get_itemprop_availability(html_content) -> Restock: | ||||
|     """ | ||||
|     Kind of funny/cool way to find price/availability in one of many different possibilities. | ||||
|     Use 'extruct' to find any possible RDFa/microdata/json-ld data, make a JSON string from the output then search it. | ||||
|     """ | ||||
|     from jsonpath_ng import parse | ||||
|  | ||||
|     import re | ||||
|     now = time.time() | ||||
|     import extruct | ||||
|     logger.trace(f"Imported extruct module in {time.time() - now:.3f}s") | ||||
|  | ||||
|     now = time.time() | ||||
|  | ||||
|     # Extruct is very slow, I'm wondering if some ML is going to be faster (800ms on my i7), 'rdfa' seems to be the heaviest. | ||||
|     syntaxes = ['dublincore', 'json-ld', 'microdata', 'microformat', 'opengraph'] | ||||
|     try: | ||||
|         data = extruct.extract(html_content, syntaxes=syntaxes) | ||||
|     except Exception as e: | ||||
|         logger.warning(f"Unable to extract data, document parsing with extruct failed with {type(e).__name__} - {str(e)}") | ||||
|         return Restock() | ||||
|  | ||||
|     logger.trace(f"Extruct basic extract of all metadata done in {time.time() - now:.3f}s") | ||||
|  | ||||
|     # First phase, dead simple scanning of anything that looks useful | ||||
|     value = Restock() | ||||
|     if data: | ||||
|         logger.debug(f"Using jsonpath to find price/availability/etc") | ||||
|         price_parse = parse('$..(price|Price)') | ||||
|         pricecurrency_parse = parse('$..(pricecurrency|currency|priceCurrency )') | ||||
|         availability_parse = parse('$..(availability|Availability)') | ||||
|  | ||||
|         price_result = _deduplicate_prices(price_parse.find(data)) | ||||
|         if price_result: | ||||
|             # Right now, we just support single product items, maybe we will store the whole actual metadata separately in the future and | ||||
|             # parse that for the UI? | ||||
|             prices_found = set(str(item.value).replace('$', '') for item in price_result) | ||||
|             if len(price_result) > 1 and len(prices_found) > 1: | ||||
|                 # See if all prices are different, in the case that one product has many embedded data types with the same price | ||||
|                 # One might have $121.95 and another 121.95 etc | ||||
|                 logger.warning(f"More than one price found {prices_found}, throwing exception, can't use this plugin.") | ||||
|                 raise MoreThanOnePriceFound() | ||||
|  | ||||
|             value['price'] = price_result[0].value | ||||
|  | ||||
|         pricecurrency_result = pricecurrency_parse.find(data) | ||||
|         if pricecurrency_result: | ||||
|             value['currency'] = pricecurrency_result[0].value | ||||
|  | ||||
|         availability_result = availability_parse.find(data) | ||||
|         if availability_result: | ||||
|             value['availability'] = availability_result[0].value | ||||
|  | ||||
|         if value.get('availability'): | ||||
|             value['availability'] = re.sub(r'(?i)^(https|http)://schema.org/', '', | ||||
|                                            value.get('availability').strip(' "\'').lower()) if value.get('availability') else None | ||||
|  | ||||
|         # Second, go dig OpenGraph which is something that jsonpath_ng can't do because of the tuples and double-dots (:) | ||||
|         if not value.get('price') or value.get('availability'): | ||||
|             logger.debug(f"Alternatively digging through OpenGraph properties for restock/price info..") | ||||
|             jsonpath_expr = parse('$..properties') | ||||
|  | ||||
|             for match in jsonpath_expr.find(data): | ||||
|                 if not value.get('price'): | ||||
|                     value['price'] = _search_prop_by_value([match.value], "price:amount") | ||||
|                 if not value.get('availability'): | ||||
|                     value['availability'] = _search_prop_by_value([match.value], "product:availability") | ||||
|                 if not value.get('currency'): | ||||
|                     value['currency'] = _search_prop_by_value([match.value], "price:currency") | ||||
|     logger.trace(f"Processed with Extruct in {time.time()-now:.3f}s") | ||||
|  | ||||
|     return value | ||||
|  | ||||
|  | ||||
| def is_between(number, lower=None, upper=None): | ||||
|     """ | ||||
|     Check if a number is between two values. | ||||
|  | ||||
|     Parameters: | ||||
|     number (float): The number to check. | ||||
|     lower (float or None): The lower bound (inclusive). If None, no lower bound. | ||||
|     upper (float or None): The upper bound (inclusive). If None, no upper bound. | ||||
|  | ||||
|     Returns: | ||||
|     bool: True if the number is between the lower and upper bounds, False otherwise. | ||||
|     """ | ||||
|     return (lower is None or lower <= number) and (upper is None or number <= upper) | ||||
|  | ||||
|  | ||||
| class perform_site_check(difference_detection_processor): | ||||
|     screenshot = None | ||||
|     xpath_data = None | ||||
|  | ||||
|     def run_changedetection(self, watch, skip_when_checksum_same=True): | ||||
|         import hashlib | ||||
|  | ||||
|         if not watch: | ||||
|             raise Exception("Watch no longer exists.") | ||||
|  | ||||
|         # Unset any existing notification error | ||||
|         update_obj = {'last_notification_error': False, 'last_error': False, 'restock':  Restock()} | ||||
|  | ||||
|         self.screenshot = self.fetcher.screenshot | ||||
|         self.xpath_data = self.fetcher.xpath_data | ||||
|  | ||||
|         # Track the content type | ||||
|         update_obj['content_type'] = self.fetcher.headers.get('Content-Type', '') | ||||
|         update_obj["last_check_status"] = self.fetcher.get_last_status_code() | ||||
|  | ||||
|         # Only try to process restock information (like scraping for keywords) if the page was actually rendered correctly. | ||||
|         # Otherwise it will assume "in stock" because nothing suggesting the opposite was found | ||||
|         from ...html_tools import html_to_text | ||||
|         text = html_to_text(self.fetcher.content) | ||||
|         logger.debug(f"Length of text after conversion: {len(text)}") | ||||
|         if not len(text): | ||||
|             from ...content_fetchers.exceptions import ReplyWithContentButNoText | ||||
|             raise ReplyWithContentButNoText(url=watch.link, | ||||
|                                             status_code=self.fetcher.get_last_status_code(), | ||||
|                                             screenshot=self.fetcher.screenshot, | ||||
|                                             html_content=self.fetcher.content, | ||||
|                                             xpath_data=self.fetcher.xpath_data | ||||
|                                             ) | ||||
|  | ||||
|         # Which restock settings to compare against? | ||||
|         restock_settings = watch.get('restock_settings', {}) | ||||
|  | ||||
|         # See if any tags have 'activate for individual watches in this tag/group?' enabled and use the first we find | ||||
|         for tag_uuid in watch.get('tags'): | ||||
|             tag = self.datastore.data['settings']['application']['tags'].get(tag_uuid, {}) | ||||
|             if tag.get('overrides_watch'): | ||||
|                 restock_settings = tag.get('restock_settings', {}) | ||||
|                 logger.info(f"Watch {watch.get('uuid')} - Tag '{tag.get('title')}' selected for restock settings override") | ||||
|                 break | ||||
|  | ||||
|  | ||||
|         itemprop_availability = {} | ||||
|         try: | ||||
|             itemprop_availability = get_itemprop_availability(self.fetcher.content) | ||||
|         except MoreThanOnePriceFound as e: | ||||
|             # Add the real data | ||||
|             raise ProcessorException(message="Cannot run, more than one price detected, this plugin is only for product pages with ONE product, try the content-change detection mode.", | ||||
|                                      url=watch.get('url'), | ||||
|                                      status_code=self.fetcher.get_last_status_code(), | ||||
|                                      screenshot=self.fetcher.screenshot, | ||||
|                                      xpath_data=self.fetcher.xpath_data | ||||
|                                      ) | ||||
|  | ||||
|         # Something valid in get_itemprop_availability() by scraping metadata ? | ||||
|         if itemprop_availability.get('price') or itemprop_availability.get('availability'): | ||||
|             # Store for other usage | ||||
|             update_obj['restock'] = itemprop_availability | ||||
|  | ||||
|             if itemprop_availability.get('availability'): | ||||
|                 # @todo: Configurable? | ||||
|                 if any(substring.lower() in itemprop_availability['availability'].lower() for substring in [ | ||||
|                     'instock', | ||||
|                     'instoreonly', | ||||
|                     'limitedavailability', | ||||
|                     'onlineonly', | ||||
|                     'presale'] | ||||
|                        ): | ||||
|                     update_obj['restock']['in_stock'] = True | ||||
|                 else: | ||||
|                     update_obj['restock']['in_stock'] = False | ||||
|  | ||||
|         # Main detection method | ||||
|         fetched_md5 = None | ||||
|  | ||||
|         # store original price if not set | ||||
|         if itemprop_availability and itemprop_availability.get('price') and not itemprop_availability.get('original_price'): | ||||
|             itemprop_availability['original_price'] = itemprop_availability.get('price') | ||||
|             update_obj['restock']["original_price"] = itemprop_availability.get('price') | ||||
|  | ||||
|         if not self.fetcher.instock_data and not itemprop_availability.get('availability'): | ||||
|             raise ProcessorException( | ||||
|                 message=f"Unable to extract restock data for this page unfortunately. (Got code {self.fetcher.get_last_status_code()} from server), no embedded stock information was found and nothing interesting in the text, try using this watch with Chrome.", | ||||
|                 url=watch.get('url'), | ||||
|                 status_code=self.fetcher.get_last_status_code(), | ||||
|                 screenshot=self.fetcher.screenshot, | ||||
|                 xpath_data=self.fetcher.xpath_data | ||||
|                 ) | ||||
|  | ||||
|         # Nothing automatic in microdata found, revert to scraping the page | ||||
|         if self.fetcher.instock_data and itemprop_availability.get('availability') is None: | ||||
|             # 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold. | ||||
|             # Careful! this does not really come from chrome/js when the watch is set to plaintext | ||||
|             update_obj['restock']["in_stock"] = True if self.fetcher.instock_data == 'Possibly in stock' else False | ||||
|             logger.debug(f"Watch UUID {watch.get('uuid')} restock check returned '{self.fetcher.instock_data}' from JS scraper.") | ||||
|  | ||||
|         # What we store in the snapshot | ||||
|         price = update_obj.get('restock').get('price') if update_obj.get('restock').get('price') else "" | ||||
|         snapshot_content = f"In Stock: {update_obj.get('restock').get('in_stock')} - Price: {price}" | ||||
|  | ||||
|         # Main detection method | ||||
|         fetched_md5 = hashlib.md5(snapshot_content.encode('utf-8')).hexdigest() | ||||
|  | ||||
|         # The main thing that all this at the moment comes down to :) | ||||
|         changed_detected = False | ||||
|         logger.debug(f"Watch UUID {watch.get('uuid')} restock check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}") | ||||
|  | ||||
|         # out of stock -> back in stock only? | ||||
|         if watch.get('restock') and watch['restock'].get('in_stock') != update_obj['restock'].get('in_stock'): | ||||
|             # Yes if we only care about it going to instock, AND we are in stock | ||||
|             if restock_settings.get('in_stock_processing') == 'in_stock_only' and update_obj['restock']['in_stock']: | ||||
|                 changed_detected = True | ||||
|  | ||||
|             if restock_settings.get('in_stock_processing') == 'all_changes': | ||||
|                 # All cases | ||||
|                 changed_detected = True | ||||
|  | ||||
|         if restock_settings.get('follow_price_changes') and watch.get('restock') and update_obj.get('restock') and update_obj['restock'].get('price'): | ||||
|             price = float(update_obj['restock'].get('price')) | ||||
|             # Default to current price if no previous price found | ||||
|             if watch['restock'].get('original_price'): | ||||
|                 previous_price = float(watch['restock'].get('original_price')) | ||||
|                 # It was different, but negate it further down | ||||
|                 if price != previous_price: | ||||
|                     changed_detected = True | ||||
|  | ||||
|             # Minimum/maximum price limit | ||||
|             if update_obj.get('restock') and update_obj['restock'].get('price'): | ||||
|                 logger.debug( | ||||
|                     f"{watch.get('uuid')} - Change was detected, 'price_change_max' is '{restock_settings.get('price_change_max', '')}' 'price_change_min' is '{restock_settings.get('price_change_min', '')}', price from website is '{update_obj['restock'].get('price', '')}'.") | ||||
|                 if update_obj['restock'].get('price'): | ||||
|                     min_limit = float(restock_settings.get('price_change_min')) if restock_settings.get('price_change_min') else None | ||||
|                     max_limit = float(restock_settings.get('price_change_max')) if restock_settings.get('price_change_max') else None | ||||
|  | ||||
|                     price = float(update_obj['restock'].get('price')) | ||||
|                     logger.debug(f"{watch.get('uuid')} after float conversion - Min limit: '{min_limit}' Max limit: '{max_limit}' Price: '{price}'") | ||||
|                     if min_limit or max_limit: | ||||
|                         if is_between(number=price, lower=min_limit, upper=max_limit): | ||||
|                             # Price was between min/max limit, so there was nothing to do in any case | ||||
|                             logger.trace(f"{watch.get('uuid')} {price} is between {min_limit} and {max_limit}, nothing to check, forcing changed_detected = False (was {changed_detected})") | ||||
|                             changed_detected = False | ||||
|                         else: | ||||
|                             logger.trace(f"{watch.get('uuid')} {price} is not between {min_limit} and {max_limit}, continuing normal comparison") | ||||
|  | ||||
|                     # Price comparison by % | ||||
|                     if watch['restock'].get('original_price') and changed_detected and restock_settings.get('price_change_threshold_percent'): | ||||
|                         previous_price = float(watch['restock'].get('original_price')) | ||||
|                         pc = float(restock_settings.get('price_change_threshold_percent')) | ||||
|                         change = abs((price - previous_price) / previous_price * 100) | ||||
|                         if change and change <= pc: | ||||
|                             logger.debug(f"{watch.get('uuid')} Override change-detected to FALSE because % threshold ({pc}%) was {change:.3f}%") | ||||
|                             changed_detected = False | ||||
|                         else: | ||||
|                             logger.debug(f"{watch.get('uuid')} Price change was {change:.3f}% , (threshold {pc}%)") | ||||
|  | ||||
|         # Always record the new checksum | ||||
|         update_obj["previous_md5"] = fetched_md5 | ||||
|  | ||||
|         return changed_detected, update_obj, snapshot_content.encode('utf-8').strip(), b'' | ||||
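The thresholds above combine two rules: a detected change is suppressed while the price stays inside the optional price_change_min/price_change_max window, and a price move has to exceed price_change_threshold_percent, measured against original_price, to count. Below is a standalone sketch of that arithmetic; is_between() is the helper defined in this file, while price_change_significant() is a hypothetical name used only for illustration. The worked numbers match the example in the form help text (a $1,000 product with a 2% threshold must move by more than $20):

    def is_between(number, lower=None, upper=None):
        # Same helper as above: bounds are inclusive, None disables that bound
        return (lower is None or lower <= number) and (upper is None or number <= upper)

    def price_change_significant(price, original_price, threshold_percent):
        # Percentage move relative to the first recorded price
        change = abs((price - original_price) / original_price * 100)
        return change > threshold_percent

    print(is_between(1015.0, lower=900.0, upper=1100.0))   # True - inside the window, so the change is suppressed
    print(price_change_significant(1015.0, 1000.0, 2.0))   # False - only a 1.5% move, below the 2% threshold
    print(price_change_significant(1025.0, 1000.0, 2.0))   # True - a 2.5% move exceeds the 2% threshold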
| @@ -6,22 +6,23 @@ import os | ||||
| import re | ||||
| import urllib3 | ||||
| 
 | ||||
| from . import difference_detection_processor | ||||
| from ..html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text | ||||
| from changedetectionio.processors import difference_detection_processor | ||||
| from changedetectionio.html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text | ||||
| from changedetectionio import html_tools, content_fetchers | ||||
| from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT | ||||
| import changedetectionio.content_fetchers | ||||
| from copy import deepcopy | ||||
| from loguru import logger | ||||
| 
 | ||||
| urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) | ||||
| 
 | ||||
| name = 'Webpage Text/HTML, JSON and PDF changes' | ||||
| description = 'Detects all text changes where possible' | ||||
| json_filter_prefixes = ['json:', 'jq:'] | ||||
| 
 | ||||
| json_filter_prefixes = ['json:', 'jq:', 'jqraw:'] | ||||
| 
 | ||||
| class FilterNotFoundInResponse(ValueError): | ||||
|     def __init__(self, msg): | ||||
|     def __init__(self, msg, screenshot=None, xpath_data=None): | ||||
|         self.screenshot = screenshot | ||||
|         self.xpath_data = xpath_data | ||||
|         ValueError.__init__(self, msg) | ||||
| 
 | ||||
| 
 | ||||
| @@ -34,14 +35,13 @@ class PDFToHTMLToolNotFound(ValueError): | ||||
| # (set_proxy_from_list) | ||||
| class perform_site_check(difference_detection_processor): | ||||
| 
 | ||||
|     def run_changedetection(self, uuid, skip_when_checksum_same=True): | ||||
|     def run_changedetection(self, watch, skip_when_checksum_same=True): | ||||
| 
 | ||||
|         changed_detected = False | ||||
|         html_content = "" | ||||
|         screenshot = False  # as bytes | ||||
|         stripped_text_from_html = "" | ||||
| 
 | ||||
|         # DeepCopy so we can be sure we don't accidentally change anything by reference | ||||
|         watch = deepcopy(self.datastore.data['watching'].get(uuid)) | ||||
|         if not watch: | ||||
|             raise Exception("Watch no longer exists.") | ||||
| 
 | ||||
| @@ -116,12 +116,12 @@ class perform_site_check(difference_detection_processor): | ||||
|         # Better would be if Watch.model could access the global data also | ||||
|         # and then use getattr https://docs.python.org/3/reference/datamodel.html#object.__getitem__ | ||||
|         # https://realpython.com/inherit-python-dict/ instead of doing it procedurely | ||||
|         include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='include_filters') | ||||
|         include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='include_filters') | ||||
| 
 | ||||
|         # 1845 - remove duplicated filters in both group and watch include filter | ||||
|         include_filters_rule = list(dict.fromkeys(watch.get('include_filters', []) + include_filters_from_tags)) | ||||
| 
 | ||||
|         subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='subtractive_selectors'), | ||||
|         subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='subtractive_selectors'), | ||||
|                                  *watch.get("subtractive_selectors", []), | ||||
|                                  *self.datastore.data["settings"]["application"].get("global_subtractive_selectors", []) | ||||
|                                  ] | ||||
| @@ -176,19 +176,19 @@ class perform_site_check(difference_detection_processor): | ||||
|                                                                     html_content=self.fetcher.content, | ||||
|                                                                     append_pretty_line_formatting=not watch.is_source_type_url, | ||||
|                                                                     is_rss=is_rss) | ||||
| 
 | ||||
|                         elif filter_rule.startswith('xpath1:'): | ||||
|                             html_content += html_tools.xpath1_filter(xpath_filter=filter_rule.replace('xpath1:', ''), | ||||
|                                                                     html_content=self.fetcher.content, | ||||
|                                                                     append_pretty_line_formatting=not watch.is_source_type_url, | ||||
|                                                                     is_rss=is_rss) | ||||
|                                                                      html_content=self.fetcher.content, | ||||
|                                                                      append_pretty_line_formatting=not watch.is_source_type_url, | ||||
|                                                                      is_rss=is_rss) | ||||
|                         else: | ||||
|                             # CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text | ||||
|                             html_content += html_tools.include_filters(include_filters=filter_rule, | ||||
|                                                                        html_content=self.fetcher.content, | ||||
|                                                                        append_pretty_line_formatting=not watch.is_source_type_url) | ||||
| 
 | ||||
|                     if not html_content.strip(): | ||||
|                         raise FilterNotFoundInResponse(include_filters_rule) | ||||
|                         raise FilterNotFoundInResponse(msg=include_filters_rule, screenshot=self.fetcher.screenshot, xpath_data=self.fetcher.xpath_data) | ||||
| 
 | ||||
|                 if has_subtractive_selectors: | ||||
|                     html_content = html_tools.element_removal(subtractive_selectors, html_content) | ||||
| @@ -198,18 +198,23 @@ class perform_site_check(difference_detection_processor): | ||||
|                 else: | ||||
|                     # extract text | ||||
|                     do_anchor = self.datastore.data["settings"]["application"].get("render_anchor_tag_content", False) | ||||
|                     stripped_text_from_html = \ | ||||
|                         html_tools.html_to_text( | ||||
|                             html_content=html_content, | ||||
|                             render_anchor_tag_content=do_anchor, | ||||
|                             is_rss=is_rss # #1874 activate the <title workaround hack | ||||
|                         ) | ||||
|                     stripped_text_from_html = html_tools.html_to_text(html_content=html_content, | ||||
|                                                                       render_anchor_tag_content=do_anchor, | ||||
|                                                                       is_rss=is_rss)  # 1874 activate the <title workaround hack | ||||
| 
 | ||||
|         if watch.get('sort_text_alphabetically') and stripped_text_from_html: | ||||
| 
 | ||||
|         if watch.get('trim_text_whitespace'): | ||||
|             stripped_text_from_html = '\n'.join(line.strip() for line in stripped_text_from_html.replace("\n\n", "\n").splitlines()) | ||||
| 
 | ||||
|         if watch.get('remove_duplicate_lines'): | ||||
|             stripped_text_from_html = '\n'.join(dict.fromkeys(line for line in stripped_text_from_html.replace("\n\n", "\n").splitlines())) | ||||
| 
 | ||||
|         if watch.get('sort_text_alphabetically'): | ||||
|             # Note: Because a <p>something</p> will add an extra line feed to signify the paragraph gap | ||||
|             # we end up with 'Some text\n\n', sorting will add all those extra \n at the start, so we remove them here. | ||||
|             stripped_text_from_html = stripped_text_from_html.replace('\n\n', '\n') | ||||
|             stripped_text_from_html = '\n'.join( sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower() )) | ||||
|             stripped_text_from_html = stripped_text_from_html.replace("\n\n", "\n") | ||||
|             stripped_text_from_html = '\n'.join(sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower())) | ||||
| 
 | ||||
| 
 | ||||
|         # Re #340 - return the content before the 'ignore text' was applied | ||||
|         text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8') | ||||
| @@ -219,10 +224,10 @@ class perform_site_check(difference_detection_processor): | ||||
|         # Rewrite's the processing text based on only what diff result they want to see | ||||
|         if watch.has_special_diff_filter_options_set() and len(watch.history.keys()): | ||||
|             # Now the content comes from the diff-parser and not the returned HTTP traffic, so could be some differences | ||||
|             from .. import diff | ||||
|             from changedetectionio import diff | ||||
|             # needs to not include (added) etc or it may get used twice | ||||
|             # Replace the processed text with the preferred result | ||||
|             rendered_diff = diff.render_diff(previous_version_file_contents=watch.get_last_fetched_before_filters(), | ||||
|             rendered_diff = diff.render_diff(previous_version_file_contents=watch.get_last_fetched_text_before_filters(), | ||||
|                                              newest_version_file_contents=stripped_text_from_html, | ||||
|                                              include_equal=False,  # not the same lines | ||||
|                                              include_added=watch.get('filter_text_added', True), | ||||
| @@ -231,13 +236,13 @@ class perform_site_check(difference_detection_processor): | ||||
|                                              line_feed_sep="\n", | ||||
|                                              include_change_type_prefix=False) | ||||
| 
 | ||||
|             watch.save_last_fetched_before_filters(text_content_before_ignored_filter) | ||||
|             watch.save_last_text_fetched_before_filters(text_content_before_ignored_filter) | ||||
| 
 | ||||
|             if not rendered_diff and stripped_text_from_html: | ||||
|                 # We had some content, but no differences were found | ||||
|                 # Store our new file as the MD5 so it will trigger in the future | ||||
|                 c = hashlib.md5(text_content_before_ignored_filter.translate(None, b'\r\n\t ')).hexdigest() | ||||
|                 return False, {'previous_md5': c}, stripped_text_from_html.encode('utf-8') | ||||
|                 return False, {'previous_md5': c}, stripped_text_from_html.encode('utf-8'), stripped_text_from_html.encode('utf-8') | ||||
|             else: | ||||
|                 stripped_text_from_html = rendered_diff | ||||
| 
 | ||||
| @@ -246,9 +251,10 @@ class perform_site_check(difference_detection_processor): | ||||
|         if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0: | ||||
|             raise content_fetchers.exceptions.ReplyWithContentButNoText(url=url, | ||||
|                                                             status_code=self.fetcher.get_last_status_code(), | ||||
|                                                             screenshot=screenshot, | ||||
|                                                             screenshot=self.fetcher.screenshot, | ||||
|                                                             has_filters=has_filter_rule, | ||||
|                                                             html_content=html_content | ||||
|                                                             html_content=html_content, | ||||
|                                                             xpath_data=self.fetcher.xpath_data | ||||
|                                                             ) | ||||
| 
 | ||||
|         # We rely on the actual text in the html output.. many sites have random script vars etc, | ||||
| @@ -290,7 +296,7 @@ class perform_site_check(difference_detection_processor): | ||||
|                         for match in res: | ||||
|                             regex_matched_output += [match] + [b'\n'] | ||||
| 
 | ||||
|             # Now we will only show what the regex matched | ||||
|             ########################################################## | ||||
|             stripped_text_from_html = b'' | ||||
|             text_content_before_ignored_filter = b'' | ||||
|             if regex_matched_output: | ||||
| @@ -298,6 +304,8 @@ class perform_site_check(difference_detection_processor): | ||||
|                 stripped_text_from_html = b''.join(regex_matched_output) | ||||
|                 text_content_before_ignored_filter = stripped_text_from_html | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
|         # Re #133 - if we should strip whitespaces from triggering the change detected comparison | ||||
|         if self.datastore.data['settings']['application'].get('ignore_whitespace', False): | ||||
|             fetched_md5 = hashlib.md5(stripped_text_from_html.translate(None, b'\r\n\t ')).hexdigest() | ||||
| @@ -338,23 +346,17 @@ class perform_site_check(difference_detection_processor): | ||||
|         if blocked: | ||||
|             changed_detected = False | ||||
| 
 | ||||
|         # Extract title as title | ||||
|         if is_html: | ||||
|             if self.datastore.data['settings']['application'].get('extract_title_as_title') or watch['extract_title_as_title']: | ||||
|                 if not watch['title'] or not len(watch['title']): | ||||
|                     update_obj['title'] = html_tools.extract_element(find='title', html_content=self.fetcher.content) | ||||
| 
 | ||||
|         logger.debug(f"Watch UUID {uuid} content check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}") | ||||
|         logger.debug(f"Watch UUID {watch.get('uuid')} content check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}") | ||||
| 
 | ||||
|         if changed_detected: | ||||
|             if watch.get('check_unique_lines', False): | ||||
|                 has_unique_lines = watch.lines_contain_something_unique_compared_to_history(lines=stripped_text_from_html.splitlines()) | ||||
|                 # One or more lines? unsure? | ||||
|                 if not has_unique_lines: | ||||
|                     logger.debug(f"check_unique_lines: UUID {uuid} didnt have anything new setting change_detected=False") | ||||
|                     logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} didnt have anything new setting change_detected=False") | ||||
|                     changed_detected = False | ||||
|                 else: | ||||
|                     logger.debug(f"check_unique_lines: UUID {uuid} had unique content") | ||||
|                     logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} had unique content") | ||||
| 
 | ||||
|         # Always record the new checksum | ||||
|         update_obj["previous_md5"] = fetched_md5 | ||||
| @@ -363,4 +365,4 @@ class perform_site_check(difference_detection_processor): | ||||
|         if not watch.get('previous_md5'): | ||||
|             watch['previous_md5'] = fetched_md5 | ||||
| 
 | ||||
|         return changed_detected, update_obj, text_content_before_ignored_filter | ||||
|         return changed_detected, update_obj, text_content_before_ignored_filter, stripped_text_from_html | ||||
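The hunk above introduces two additional per-watch text transforms (trim_text_whitespace and remove_duplicate_lines) alongside the reworked alphabetical sort. A minimal standalone sketch of the same three operations on made-up sample text; dict.fromkeys() keeps only the first occurrence of each line while preserving order:

    text = "Banana  \napple\nBanana  \n\ncherry"

    # trim_text_whitespace: collapse blank lines and strip each remaining line
    trimmed = '\n'.join(line.strip() for line in text.replace("\n\n", "\n").splitlines())

    # remove_duplicate_lines: order-preserving de-duplication via dict.fromkeys()
    deduped = '\n'.join(dict.fromkeys(trimmed.splitlines()))

    # sort_text_alphabetically: case-insensitive sort, original casing kept
    sorted_text = '\n'.join(sorted(deduped.splitlines(), key=lambda x: x.lower()))

    print(sorted_text)  # "apple\nBanana\ncherry"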
| @@ -35,4 +35,8 @@ pytest tests/test_access_control.py | ||||
| pytest tests/test_notification.py | ||||
| pytest tests/test_backend.py | ||||
| pytest tests/test_rss.py | ||||
| pytest tests/test_unique_lines.py | ||||
| pytest tests/test_unique_lines.py | ||||
|  | ||||
| # Check file:// will pick up a file when enabled | ||||
| echo "Hello world" > /tmp/test-file.txt | ||||
| ALLOW_FILE_URI=yes pytest tests/test_security.py | ||||
|   | ||||
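The added lines write a throwaway file and re-run the security tests with ALLOW_FILE_URI=yes so that file:// sources are exercised. The snippet below is only a hypothetical illustration of the kind of opt-in guard such a flag implies; the function name and the accepted values are assumptions, not the project's implementation:

    import os

    def file_url_allowed(url: str) -> bool:
        # Hypothetical guard: file:// URLs are rejected unless the operator opts in
        if not url.lower().startswith('file://'):
            return True
        return os.getenv('ALLOW_FILE_URI', '').strip().lower() in ('yes', 'true', '1')

    print(file_url_allowed('https://example.com/'))        # True
    print(file_url_allowed('file:///tmp/test-file.txt'))   # True only when ALLOW_FILE_URI is set to yes/true/1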
| @@ -16,25 +16,31 @@ echo "---------------------------------- SOCKS5 -------------------" | ||||
| docker run --network changedet-network \ | ||||
|   -v `pwd`/tests/proxy_socks5/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json \ | ||||
|   --rm \ | ||||
|   -e "FLASK_SERVER_NAME=cdio" \ | ||||
|   --hostname cdio \ | ||||
|   -e "SOCKSTEST=proxiesjson" \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py' | ||||
|   bash -c 'cd changedetectionio && pytest --live-server-host=0.0.0.0 --live-server-port=5004  -s tests/proxy_socks5/test_socks5_proxy_sources.py' | ||||
|  | ||||
| # SOCKS5 related - by manually entering in UI | ||||
| docker run --network changedet-network \ | ||||
|   --rm \ | ||||
|   -e "FLASK_SERVER_NAME=cdio" \ | ||||
|   --hostname cdio \ | ||||
|   -e "SOCKSTEST=manual" \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy.py' | ||||
|   bash -c 'cd changedetectionio && pytest --live-server-host=0.0.0.0 --live-server-port=5004  -s tests/proxy_socks5/test_socks5_proxy.py' | ||||
|  | ||||
| # SOCKS5 related - test from proxies.json via playwright - NOTE: PLAYWRIGHT DOESN'T SUPPORT AUTHENTICATING PROXIES | ||||
| docker run --network changedet-network \ | ||||
|   -e "SOCKSTEST=manual-playwright" \ | ||||
|   --hostname cdio \ | ||||
|   -e "FLASK_SERVER_NAME=cdio" \ | ||||
|   -v `pwd`/tests/proxy_socks5/proxies.json-example-noauth:/app/changedetectionio/test-datastore/proxies.json \ | ||||
|   -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" \ | ||||
|   --rm \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py' | ||||
|   bash -c 'cd changedetectionio && pytest --live-server-host=0.0.0.0 --live-server-port=5004  -s tests/proxy_socks5/test_socks5_proxy_sources.py' | ||||
|  | ||||
| echo "socks5 server logs" | ||||
| docker logs socks5proxy | ||||
|   | ||||
| @@ -1,14 +1,5 @@ | ||||
| $(document).ready(function () { | ||||
|  | ||||
|     // duplicate | ||||
|     var csrftoken = $('input[name=csrf_token]').val(); | ||||
|     $.ajaxSetup({ | ||||
|         beforeSend: function (xhr, settings) { | ||||
|             if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) { | ||||
|                 xhr.setRequestHeader("X-CSRFToken", csrftoken) | ||||
|             } | ||||
|         } | ||||
|     }) | ||||
|     var browsersteps_session_id; | ||||
|     var browser_interface_seconds_remaining = 0; | ||||
|     var apply_buttons_disabled = false; | ||||
|   | ||||
							
								
								
									
changedetectionio/static/js/csrf.js  (new file, 10 lines)
							| @@ -0,0 +1,10 @@ | ||||
| $(document).ready(function () { | ||||
|     $.ajaxSetup({ | ||||
|         beforeSend: function (xhr, settings) { | ||||
|             if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) { | ||||
|                 xhr.setRequestHeader("X-CSRFToken", csrftoken) | ||||
|             } | ||||
|         } | ||||
|     }) | ||||
| }); | ||||
|  | ||||
| @@ -1,13 +1,4 @@ | ||||
| $(document).ready(function () { | ||||
|     var csrftoken = $('input[name=csrf_token]').val(); | ||||
|     $.ajaxSetup({ | ||||
|         beforeSend: function (xhr, settings) { | ||||
|             if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) { | ||||
|                 xhr.setRequestHeader("X-CSRFToken", csrftoken) | ||||
|             } | ||||
|         } | ||||
|     }) | ||||
|  | ||||
|     $('.needs-localtime').each(function () { | ||||
|         for (var option of this.options) { | ||||
|             var dateObject = new Date(option.value * 1000); | ||||
| @@ -48,6 +39,12 @@ $(document).ready(function () { | ||||
|       $("#highlightSnippet").remove(); | ||||
|     } | ||||
|  | ||||
|     // Listen for Escape key press | ||||
|     window.addEventListener('keydown', function (e) { | ||||
|         if (e.key === 'Escape') { | ||||
|             clean(); | ||||
|         } | ||||
|     }, false); | ||||
|  | ||||
|     function dragTextHandler(event) { | ||||
|         console.log('mouseupped'); | ||||
|   | ||||
| @@ -18,9 +18,11 @@ $(document).ready(function () { | ||||
|  | ||||
|     }); | ||||
|  | ||||
|     $("#notification-token-toggle").click(function (e) { | ||||
|     $(".toggle-show").click(function (e) { | ||||
|         e.preventDefault(); | ||||
|         $('#notification-tokens-info').toggle(); | ||||
|         let target = $(this).data('target'); | ||||
|         $(target).toggle(); | ||||
|     }); | ||||
|  | ||||
| }); | ||||
|  | ||||
|   | ||||
| @@ -13,16 +13,6 @@ $(document).ready(function() { | ||||
|   $('#send-test-notification').click(function (e) { | ||||
|     e.preventDefault(); | ||||
|  | ||||
|     // this can be global | ||||
|     var csrftoken = $('input[name=csrf_token]').val(); | ||||
|     $.ajaxSetup({ | ||||
|         beforeSend: function(xhr, settings) { | ||||
|             if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) { | ||||
|                 xhr.setRequestHeader("X-CSRFToken", csrftoken) | ||||
|             } | ||||
|         } | ||||
|     }) | ||||
|  | ||||
|     data = { | ||||
|       notification_body: $('#notification_body').val(), | ||||
|       notification_format: $('#notification_format').val(), | ||||
|   | ||||
| @@ -2,250 +2,258 @@ | ||||
| // All rights reserved. | ||||
| // yes - this is really a hack, if you are a front-ender and want to help, please get in touch! | ||||
|  | ||||
| $(document).ready(function () { | ||||
| let runInClearMode = false; | ||||
|  | ||||
|     var current_selected_i; | ||||
|     var state_clicked = false; | ||||
| $(document).ready(() => { | ||||
|     let currentSelections = []; | ||||
|     let currentSelection = null; | ||||
|     let appendToList = false; | ||||
|     let c, xctx, ctx; | ||||
|     let xScale = 1, yScale = 1; | ||||
|     let selectorImage, selectorImageRect, selectorData; | ||||
|  | ||||
|     var c; | ||||
|  | ||||
|     // greyed out fill context | ||||
|     var xctx; | ||||
|     // redline highlight context | ||||
|     var ctx; | ||||
|     // Global jQuery selectors with "Elem" appended | ||||
|     const $selectorCanvasElem = $('#selector-canvas'); | ||||
|     const $includeFiltersElem = $("#include_filters"); | ||||
|     const $selectorBackgroundElem = $("img#selector-background"); | ||||
|     const $selectorCurrentXpathElem = $("#selector-current-xpath span"); | ||||
|     const $fetchingUpdateNoticeElem = $('.fetching-update-notice'); | ||||
|     const $selectorWrapperElem = $("#selector-wrapper"); | ||||
|  | ||||
|     var current_default_xpath = []; | ||||
|     var x_scale = 1; | ||||
|     var y_scale = 1; | ||||
|     var selector_image; | ||||
|     var selector_image_rect; | ||||
|     var selector_data; | ||||
|     // Color constants | ||||
|     const FILL_STYLE_HIGHLIGHT = 'rgba(205,0,0,0.35)'; | ||||
|     const FILL_STYLE_GREYED_OUT = 'rgba(205,205,205,0.95)'; | ||||
|     const STROKE_STYLE_HIGHLIGHT = 'rgba(255,0,0, 0.9)'; | ||||
|     const FILL_STYLE_REDLINE = 'rgba(255,0,0, 0.1)'; | ||||
|     const STROKE_STYLE_REDLINE = 'rgba(225,0,0,0.9)'; | ||||
|  | ||||
|     $('#visualselector-tab').click(function () { | ||||
|         $("img#selector-background").off('load'); | ||||
|         state_clicked = false; | ||||
|         current_selected_i = false; | ||||
|         bootstrap_visualselector(); | ||||
|     $('#visualselector-tab').click(() => { | ||||
|         $selectorBackgroundElem.off('load'); | ||||
|         currentSelections = []; | ||||
|         bootstrapVisualSelector(); | ||||
|     }); | ||||
|  | ||||
|     $(document).on('keydown', function (event) { | ||||
|         if ($("img#selector-background").is(":visible")) { | ||||
|             if (event.key == "Escape") { | ||||
|                 state_clicked = false; | ||||
|                 ctx.clearRect(0, 0, c.width, c.height); | ||||
|     function clearReset() { | ||||
|         ctx.clearRect(0, 0, c.width, c.height); | ||||
|  | ||||
|         if ($includeFiltersElem.val().length) { | ||||
|             alert("Existing filters under the 'Filters & Triggers' tab were cleared."); | ||||
|         } | ||||
|         $includeFiltersElem.val(''); | ||||
|  | ||||
|         currentSelections = []; | ||||
|  | ||||
|         // Means we ignore the xpaths from the scraper marked as sel.highlight_as_custom_filter (it matched a previous selector) | ||||
|         runInClearMode = true; | ||||
|  | ||||
|         highlightCurrentSelected(); | ||||
|     } | ||||
|  | ||||
|     function splitToList(v) { | ||||
|         return v.split('\n').map(line => line.trim()).filter(line => line.length > 0); | ||||
|     } | ||||
|  | ||||
|     function sortScrapedElementsBySize() { | ||||
|         // Sort the currentSelections array by area (width * height) in descending order | ||||
|         selectorData['size_pos'].sort((a, b) => { | ||||
|             const areaA = a.width * a.height; | ||||
|             const areaB = b.width * b.height; | ||||
|             return areaB - areaA; | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     $(document).on('keydown keyup', (event) => { | ||||
|         if (event.code === 'ShiftLeft' || event.code === 'ShiftRight') { | ||||
|             appendToList = event.type === 'keydown'; | ||||
|         } | ||||
|  | ||||
|         if (event.type === 'keydown') { | ||||
|             if ($selectorBackgroundElem.is(":visible") && event.key === "Escape") { | ||||
|                 clearReset(); | ||||
|             } | ||||
|         } | ||||
|     }); | ||||
|  | ||||
|     // For when the page loads | ||||
|     if (!window.location.hash || window.location.hash != '#visualselector') { | ||||
|         $("img#selector-background").attr('src', ''); | ||||
|     $('#clear-selector').on('click', () => { | ||||
|         clearReset(); | ||||
|     }); | ||||
|     // So if they start switching between visualSelector and manual filters, stop it from rendering old filters | ||||
|     $('li.tab a').on('click', () => { | ||||
|         runInClearMode = true; | ||||
|     }); | ||||
|  | ||||
|     if (!window.location.hash || window.location.hash !== '#visualselector') { | ||||
|         $selectorBackgroundElem.attr('src', ''); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     // Handle clearing button/link | ||||
|     $('#clear-selector').on('click', function (event) { | ||||
|         if (!state_clicked) { | ||||
|             alert('Oops, Nothing selected!'); | ||||
|         } | ||||
|         state_clicked = false; | ||||
|         ctx.clearRect(0, 0, c.width, c.height); | ||||
|         xctx.clearRect(0, 0, c.width, c.height); | ||||
|         $("#include_filters").val(''); | ||||
|     }); | ||||
|     bootstrapVisualSelector(); | ||||
|  | ||||
|  | ||||
|     bootstrap_visualselector(); | ||||
|  | ||||
|  | ||||
|     function bootstrap_visualselector() { | ||||
|         if (1) { | ||||
|             // bootstrap it, this will trigger everything else | ||||
|             $("img#selector-background").on("error", function () { | ||||
|                 $('.fetching-update-notice').html("<strong>Ooops!</strong> The VisualSelector tool needs atleast one fetched page, please unpause the watch and/or wait for the watch to complete fetching and then reload this page."); | ||||
|                 $('.fetching-update-notice').css('color','#bb0000'); | ||||
|                 $('#selector-current-xpath').hide(); | ||||
|                 $('#clear-selector').hide(); | ||||
|             }).bind('load', function () { | ||||
|     function bootstrapVisualSelector() { | ||||
|         $selectorBackgroundElem | ||||
|             .on("error", () => { | ||||
|                 $fetchingUpdateNoticeElem.html("<strong>Ooops!</strong> The VisualSelector tool needs at least one fetched page, please unpause the watch and/or wait for the watch to complete fetching and then reload this page.") | ||||
|                     .css('color', '#bb0000'); | ||||
|                 $('#selector-current-xpath, #clear-selector').hide(); | ||||
|             }) | ||||
|             .on('load', () => { | ||||
|                 console.log("Loaded background..."); | ||||
|                 c = document.getElementById("selector-canvas"); | ||||
|                 // greyed out fill context | ||||
|                 xctx = c.getContext("2d"); | ||||
|                 // redline highlight context | ||||
|                 ctx = c.getContext("2d"); | ||||
|                 if ($("#include_filters").val().trim().length) { | ||||
|                     current_default_xpath = $("#include_filters").val().split(/\r?\n/g); | ||||
|                 } else { | ||||
|                     current_default_xpath = []; | ||||
|                 } | ||||
|                 fetch_data(); | ||||
|                 $('#selector-canvas').off("mousemove mousedown"); | ||||
|                 // screenshot_url defined in the edit.html template | ||||
|             }).attr("src", screenshot_url); | ||||
|         } | ||||
|         // Tell visualSelector that the image should update | ||||
|         var s = $("img#selector-background").attr('src') + "?" + new Date().getTime(); | ||||
|         $("img#selector-background").attr('src', s) | ||||
|                 fetchData(); | ||||
|                 $selectorCanvasElem.off("mousemove mousedown"); | ||||
|             }) | ||||
|             .attr("src", screenshot_url); | ||||
|  | ||||
|         let s = `${$selectorBackgroundElem.attr('src')}?${new Date().getTime()}`; | ||||
|         $selectorBackgroundElem.attr('src', s); | ||||
|     } | ||||
|  | ||||
|     // This is fired once the img src is loaded in bootstrap_visualselector() | ||||
|     function fetch_data() { | ||||
|         // Image is ready | ||||
|         $('.fetching-update-notice').html("Fetching element data.."); | ||||
|     function alertIfFilterNotFound() { | ||||
|         let existingFilters = splitToList($includeFiltersElem.val()); | ||||
|         let sizePosXpaths = selectorData['size_pos'].map(sel => sel.xpath); | ||||
|  | ||||
|         for (let filter of existingFilters) { | ||||
|             if (!sizePosXpaths.includes(filter)) { | ||||
|                 alert(`One or more of your existing filters was not found and will be removed when a new filter is selected.`); | ||||
|                 break; | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     function fetchData() { | ||||
|         $fetchingUpdateNoticeElem.html("Fetching element data.."); | ||||
|  | ||||
|         $.ajax({ | ||||
|             url: watch_visual_selector_data_url, | ||||
|             context: document.body | ||||
|         }).done(function (data) { | ||||
|             $('.fetching-update-notice').html("Rendering.."); | ||||
|             selector_data = data; | ||||
|             console.log("Reported browser width from backend: " + data['browser_width']); | ||||
|             state_clicked = false; | ||||
|             set_scale(); | ||||
|             reflow_selector(); | ||||
|             $('.fetching-update-notice').fadeOut(); | ||||
|         }); | ||||
|         }).done((data) => { | ||||
|             $fetchingUpdateNoticeElem.html("Rendering.."); | ||||
|             selectorData = data; | ||||
|             sortScrapedElementsBySize(); | ||||
|             console.log(`Reported browser width from backend: ${data['browser_width']}`); | ||||
|  | ||||
|             // Little sanity check for the user, alert them if something missing | ||||
|             alertIfFilterNotFound(); | ||||
|  | ||||
|             setScale(); | ||||
|             reflowSelector(); | ||||
|             $fetchingUpdateNoticeElem.fadeOut(); | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     function updateFiltersText() { | ||||
|         // Assuming currentSelections is already defined and contains the selections | ||||
|         let uniqueSelections = new Set(currentSelections.map(sel => (sel.xpath[0] === '/' ? `xpath:${sel.xpath}` : sel.xpath))); | ||||
|  | ||||
|     function set_scale() { | ||||
|  | ||||
|         // some things to check if the scaling doesnt work | ||||
|         // - that the widths/sizes really are about the actual screen size cat elements.json |grep -o width......|sort|uniq | ||||
|         $("#selector-wrapper").show(); | ||||
|         selector_image = $("img#selector-background")[0]; | ||||
|         selector_image_rect = selector_image.getBoundingClientRect(); | ||||
|  | ||||
|         // make the canvas the same size as the image | ||||
|         $('#selector-canvas').attr('height', selector_image_rect.height); | ||||
|         $('#selector-canvas').attr('width', selector_image_rect.width); | ||||
|         $('#selector-wrapper').attr('width', selector_image_rect.width); | ||||
|         x_scale = selector_image_rect.width / selector_data['browser_width']; | ||||
|         y_scale = selector_image_rect.height / selector_image.naturalHeight; | ||||
|         ctx.strokeStyle = 'rgba(255,0,0, 0.9)'; | ||||
|         ctx.fillStyle = 'rgba(255,0,0, 0.1)'; | ||||
|         ctx.lineWidth = 3; | ||||
|         console.log("scaling set  x: " + x_scale + " by y:" + y_scale); | ||||
|         $("#selector-current-xpath").css('max-width', selector_image_rect.width); | ||||
|     } | ||||
|  | ||||
|     function reflow_selector() { | ||||
|         $(window).resize(function () { | ||||
|             set_scale(); | ||||
|             highlight_current_selected_i(); | ||||
|         }); | ||||
|         var selector_currnt_xpath_text = $("#selector-current-xpath span"); | ||||
|  | ||||
|         set_scale(); | ||||
|  | ||||
|         console.log(selector_data['size_pos'].length + " selectors found"); | ||||
|  | ||||
|         // highlight the default one if we can find it in the xPath list | ||||
|         // or the xpath matches the default one | ||||
|         found = false; | ||||
|         if (current_default_xpath.length) { | ||||
|             // Find the first one that matches | ||||
|             // @todo In the future paint all that match | ||||
|             for (const c of current_default_xpath) { | ||||
|                 for (var i = selector_data['size_pos'].length; i !== 0; i--) { | ||||
|                     if (selector_data['size_pos'][i - 1].xpath.trim() === c.trim()) { | ||||
|                         console.log("highlighting " + c); | ||||
|                         current_selected_i = i - 1; | ||||
|                         highlight_current_selected_i(); | ||||
|                         found = true; | ||||
|                         break; | ||||
|                     } | ||||
|                 } | ||||
|                 if (found) { | ||||
|                     break; | ||||
|                 } | ||||
|             } | ||||
|             if (!found) { | ||||
|                 alert("Unfortunately your existing CSS/xPath Filter was no longer found!"); | ||||
|             } | ||||
|         if (currentSelections.length > 0) { | ||||
|             // Convert the Set back to an array and join with newline characters | ||||
|             let textboxFilterText = Array.from(uniqueSelections).join("\n"); | ||||
|             $includeFiltersElem.val(textboxFilterText); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     function setScale() { | ||||
|         $selectorWrapperElem.show(); | ||||
|         selectorImage = $selectorBackgroundElem[0]; | ||||
|         selectorImageRect = selectorImage.getBoundingClientRect(); | ||||
|  | ||||
|         $selectorCanvasElem.attr({ | ||||
|             'height': selectorImageRect.height, | ||||
|             'width': selectorImageRect.width | ||||
|         }); | ||||
|         $selectorWrapperElem.attr('width', selectorImageRect.width); | ||||
|         $('#visual-selector-heading').css('max-width', selectorImageRect.width + "px") | ||||
|  | ||||
|         xScale = selectorImageRect.width / selectorImage.naturalWidth; | ||||
|         yScale = selectorImageRect.height / selectorImage.naturalHeight; | ||||
|  | ||||
|         ctx.strokeStyle = STROKE_STYLE_HIGHLIGHT; | ||||
|         ctx.fillStyle = FILL_STYLE_REDLINE; | ||||
|         ctx.lineWidth = 3; | ||||
|         console.log("Scaling set  x: " + xScale + " by y:" + yScale); | ||||
|         $("#selector-current-xpath").css('max-width', selectorImageRect.width); | ||||
|     } | ||||
|  | ||||
|     function reflowSelector() { | ||||
|         $(window).resize(() => { | ||||
|             setScale(); | ||||
|             highlightCurrentSelected(); | ||||
|         }); | ||||
|  | ||||
|         setScale(); | ||||
|  | ||||
|         console.log(selectorData['size_pos'].length + " selectors found"); | ||||
|  | ||||
|         let existingFilters = splitToList($includeFiltersElem.val()); | ||||
|  | ||||
|         selectorData['size_pos'].forEach(sel => { | ||||
|             if ((!runInClearMode && sel.highlight_as_custom_filter) || existingFilters.includes(sel.xpath)) { | ||||
|                 console.log("highlighting " + sel.xpath); | ||||
|                 currentSelections.push(sel); | ||||
|             } | ||||
|         }); | ||||
|  | ||||
|         highlightCurrentSelected(); | ||||
|         updateFiltersText(); | ||||
|  | ||||
|         $selectorCanvasElem.bind('mousemove', handleMouseMove.debounce(5)); | ||||
|         $selectorCanvasElem.bind('mousedown', handleMouseDown.debounce(5)); | ||||
|         $selectorCanvasElem.bind('mouseleave', highlightCurrentSelected.debounce(5)); | ||||
|  | ||||
|         function handleMouseMove(e) { | ||||
|             if (!e.offsetX && !e.offsetY) { | ||||
|                 const targetOffset = $(e.target).offset(); | ||||
|                 e.offsetX = e.pageX - targetOffset.left; | ||||
|                 e.offsetY = e.pageY - targetOffset.top; | ||||
|             } | ||||
|  | ||||
|             ctx.fillStyle = FILL_STYLE_HIGHLIGHT; | ||||
|  | ||||
|             // Highlight whichever reported element has a bounding box containing the cursor | ||||
|             selectorData['size_pos'].forEach(sel => { | ||||
|                 if (e.offsetY > sel.top * yScale && e.offsetY < sel.top * yScale + sel.height * yScale && | ||||
|                     e.offsetX > sel.left * yScale && e.offsetX < sel.left * yScale + sel.width * yScale) { | ||||
|                     setCurrentSelectedText(sel.xpath); | ||||
|                     drawHighlight(sel); | ||||
|                     currentSelections.push(sel); | ||||
|                     currentSelection = sel; | ||||
|                     highlightCurrentSelected(); | ||||
|                     currentSelections.pop(); | ||||
|                 } | ||||
|             }); | ||||
|         } | ||||
|  | ||||
|         function set_current_selected_text(s) { | ||||
|             selector_currnt_xpath_text[0].innerHTML = s; | ||||
|         } | ||||
|  | ||||
|         function highlight_current_selected_i() { | ||||
|             if (state_clicked) { | ||||
|                 state_clicked = false; | ||||
|                 xctx.clearRect(0, 0, c.width, c.height); | ||||
|                 return; | ||||
|             } | ||||
|  | ||||
|             var sel = selector_data['size_pos'][current_selected_i]; | ||||
|             if (sel[0] == '/') { | ||||
|                 // @todo - not sure just checking / is right | ||||
|                 $("#include_filters").val('xpath:' + sel.xpath); | ||||
|             } else { | ||||
|                 $("#include_filters").val(sel.xpath); | ||||
|             } | ||||
|             xctx.fillStyle = 'rgba(205,205,205,0.95)'; | ||||
|             xctx.strokeStyle = 'rgba(225,0,0,0.9)'; | ||||
|             xctx.lineWidth = 3; | ||||
|             xctx.fillRect(0, 0, c.width, c.height); | ||||
|             // Clear out what only should be seen (make a clear/clean spot) | ||||
|             xctx.clearRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale); | ||||
|             xctx.strokeRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale); | ||||
|             state_clicked = true; | ||||
|             set_current_selected_text(sel.xpath); | ||||
|  | ||||
|         } | ||||
|  | ||||
|  | ||||
|         $('#selector-canvas').bind('mousedown', function (e) { | ||||
|             highlight_current_selected_i(); | ||||
|         }); | ||||
|         function setCurrentSelectedText(s) { | ||||
|             $selectorCurrentXpathElem[0].innerHTML = s; | ||||
|         } | ||||
|  | ||||
|         function drawHighlight(sel) { | ||||
|             ctx.strokeRect(sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale); | ||||
|             ctx.fillRect(sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale); | ||||
|         } | ||||
|  | ||||
|         function handleMouseDown() { | ||||
|             // If we are in 'appendToList' mode grow the list, otherwise keep only the current selection | ||||
|             currentSelections = appendToList ? [...currentSelections, currentSelection] : [currentSelection]; | ||||
|             highlightCurrentSelected(); | ||||
|             updateFiltersText(); | ||||
|         } | ||||
|  | ||||
|     } | ||||
|  | ||||
|     function highlightCurrentSelected() { | ||||
|         xctx.fillStyle = FILL_STYLE_GREYED_OUT; | ||||
|         xctx.strokeStyle = STROKE_STYLE_REDLINE; | ||||
|         xctx.lineWidth = 3; | ||||
|         xctx.clearRect(0, 0, c.width, c.height); | ||||
|  | ||||
|         currentSelections.forEach(sel => { | ||||
|             //xctx.clearRect(sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale); | ||||
|             xctx.strokeRect(sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale); | ||||
|         }); | ||||
|     } | ||||
| }); | ||||
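For reference, the highlight rectangles above are drawn by scaling the element coordinates reported by the backend (captured at the screenshot's natural size) down to the size the screenshot is actually displayed at; xScale and yScale carry that ratio. A minimal, self-contained sketch of the arithmetic, using made-up numbers rather than values from the repository:

    // Sketch of the scaling used above; the sample numbers are hypothetical.
    const displayed = { width: 640, height: 1800 };   // selectorImage.getBoundingClientRect()
    const natural = { width: 1280, height: 3600 };    // selectorImage.naturalWidth / naturalHeight

    const xScale = displayed.width / natural.width;   // 0.5
    const yScale = displayed.height / natural.height; // 0.5

    // One entry shaped like selector_data['size_pos'] (values invented for the example)
    const sel = { xpath: '//div[@id="price"]', left: 100, top: 400, width: 300, height: 40 };

    // Rectangle passed to ctx.strokeRect()/ctx.fillRect() on the overlay canvas
    const rect = [sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale];
    console.log(rect); // [50, 200, 150, 20]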
| @@ -1,8 +1,8 @@ | ||||
| function toggleOpacity(checkboxSelector, fieldSelector) { | ||||
| function toggleOpacity(checkboxSelector, fieldSelector, inverted) { | ||||
|     const checkbox = document.querySelector(checkboxSelector); | ||||
|     const fields = document.querySelectorAll(fieldSelector); | ||||
|     function updateOpacity() { | ||||
|         const opacityValue = checkbox.checked ? 0.6 : 1; | ||||
|         const opacityValue = !checkbox.checked ? (inverted ? 0.6 : 1) : (inverted ? 1 : 0.6); | ||||
|         fields.forEach(field => { | ||||
|             field.style.opacity = opacityValue; | ||||
|         }); | ||||
| @@ -12,6 +12,55 @@ function toggleOpacity(checkboxSelector, fieldSelector) { | ||||
|     checkbox.addEventListener('change', updateOpacity); | ||||
| } | ||||
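The extra `inverted` argument lets the same helper dim a field group either while its checkbox is ticked or while it is not. A short usage sketch; the first call mirrors the one made in this changeset, while the second selector pair is purely illustrative:

    document.addEventListener('DOMContentLoaded', () => {
        // Dim the interval fields while "use default" is ticked (inverted = false)
        toggleOpacity('#time_between_check_use_default', '#time_between_check', false);

        // Dim an options block while its enable-toggle is NOT ticked (inverted = true);
        // these selectors are hypothetical and only illustrate the flag.
        toggleOpacity('#enable-advanced-options', '.advanced-options', true);
    });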
|  | ||||
| (function($) { | ||||
|     // Object to store ongoing requests by namespace | ||||
|     const requests = {}; | ||||
|  | ||||
|     $.abortiveSingularAjax = function(options) { | ||||
|         const namespace = options.namespace || 'default'; | ||||
|  | ||||
|         // Abort the current request in this namespace if it's still ongoing | ||||
|         if (requests[namespace]) { | ||||
|             requests[namespace].abort(); | ||||
|         } | ||||
|  | ||||
|         // Start a new AJAX request and store its reference in the correct namespace | ||||
|         requests[namespace] = $.ajax(options); | ||||
|  | ||||
|         // Return the current request in case it's needed | ||||
|         return requests[namespace]; | ||||
|     }; | ||||
| })(jQuery); | ||||
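`$.abortiveSingularAjax` keeps at most one request in flight per namespace: starting a new call aborts the previous jqXHR stored under the same key. A hedged usage sketch; the endpoint and namespace below are illustrative, not taken from the codebase:

    $('#search-box').on('keyup', function () {
        $.abortiveSingularAjax({
            type: 'GET',
            url: '/api/search',              // hypothetical endpoint
            data: { q: $(this).val() },
            namespace: 'search'              // one in-flight request per namespace
        }).done(function (results) {
            console.log('latest results', results);
        }).fail(function (xhr) {
            // an aborted request reports statusText === 'abort'; it was simply superseded
            if (xhr.statusText !== 'abort') {
                console.error('search failed', xhr);
            }
        });
    });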
|  | ||||
| function request_textpreview_update() { | ||||
|     if (!$('body').hasClass('preview-text-enabled')) { | ||||
|         console.error("Preview text was requested but the body tag was not set up"); | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     const data = {}; | ||||
|     $('textarea:visible, input:visible').each(function () { | ||||
|         const $element = $(this); // Cache the jQuery object for the current element | ||||
|         const name = $element.attr('name'); // Get the name attribute of the element | ||||
|         data[name] = $element.is(':checkbox') ? ($element.is(':checked') ? $element.val() : undefined) : $element.val(); | ||||
|     }); | ||||
|  | ||||
|     $.abortiveSingularAjax({ | ||||
|         type: "POST", | ||||
|         url: preview_text_edit_filters_url, | ||||
|         data: data, | ||||
|         namespace: 'watchEdit' | ||||
|     }).done(function (data) { | ||||
|         $('#filters-and-triggers #text-preview-inner').text(data); | ||||
|     }).fail(function (error) { | ||||
|         if (error.statusText === 'abort') { | ||||
|             console.log('Request was aborted due to a new request being fired.'); | ||||
|         } else { | ||||
|             $('#filters-and-triggers #text-preview-inner').text('There was an error communicating with the server.'); | ||||
|         } | ||||
|     }) | ||||
| } | ||||
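Note that the call sites below (for example `request_textpreview_update.debounce(100)`) rely on a `debounce()` method being available on function objects, which is not part of standard JavaScript and is presumably supplied by a helper script elsewhere in the project's static JS. A minimal stand-in under that assumption, shown only so the call sites read in isolation:

    // Illustrative stand-in: run the function only after `wait` ms with no further calls.
    // Extending Function.prototype is normally avoided, but it matches how the call sites are written.
    Function.prototype.debounce = function (wait) {
        const fn = this;
        let timer = null;
        return function (...args) {
            clearTimeout(timer);
            timer = setTimeout(() => fn.apply(this, args), wait);
        };
    };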
|  | ||||
| $(document).ready(function () { | ||||
|     $('#notification-setting-reset-to-default').click(function (e) { | ||||
|         $('#notification_title').val(''); | ||||
| @@ -25,6 +74,24 @@ $(document).ready(function () { | ||||
|         $('#notification-tokens-info').toggle(); | ||||
|     }); | ||||
|  | ||||
|     toggleOpacity('#time_between_check_use_default', '#time_between_check'); | ||||
|     toggleOpacity('#time_between_check_use_default', '#time_between_check', false); | ||||
|  | ||||
|     const vh = Math.max(document.documentElement.clientHeight || 0, window.innerHeight || 0); | ||||
|     $("#text-preview-inner").css('max-height', (vh-300)+"px"); | ||||
|  | ||||
|     // Realtime preview of 'Filters & Text' setup | ||||
|     var debounced_request_textpreview_update = request_textpreview_update.debounce(100); | ||||
|  | ||||
|     $("#activate-text-preview").click(function (e) { | ||||
|         $('body').toggleClass('preview-text-enabled') | ||||
|         request_textpreview_update(); | ||||
|  | ||||
|         const method = $('body').hasClass('preview-text-enabled') ? 'on' : 'off'; | ||||
|         $("#text-preview-refresh")[method]('click', debounced_request_textpreview_update); | ||||
|         $('textarea:visible')[method]('keyup blur', debounced_request_textpreview_update); | ||||
|         $('input:visible')[method]('keyup blur change', debounced_request_textpreview_update); | ||||
|         $("#filters-and-triggers-tab")[method]('click', debounced_request_textpreview_update); | ||||
|     }); | ||||
|  | ||||
| }); | ||||
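The preview toggle above uses a small jQuery idiom worth calling out: it computes the method name ('on' or 'off') and indexes the jQuery object with it, so one block of code both attaches and detaches the live-update handlers. The same idiom in isolation, with illustrative selectors and handler:

    const refreshPreview = () => console.log('refresh preview');   // hypothetical handler
    const enabled = $('body').hasClass('preview-text-enabled');
    const method = enabled ? 'on' : 'off';                          // $(el)[method](...) attaches or detaches
    $('#refresh-button')[method]('click', refreshPreview);          // hypothetical element
    $('textarea:visible')[method]('keyup blur', refreshPreview);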
|  | ||||
|   | ||||
| @@ -40,15 +40,39 @@ | ||||
|   } | ||||
| } | ||||
|  | ||||
| #browser-steps-fieldlist { | ||||
|   height: 100%; | ||||
|   overflow-y: scroll; | ||||
| } | ||||
|  | ||||
| #browser-steps .flex-wrapper { | ||||
|   display: flex; | ||||
|   flex-flow: row; | ||||
|   height: 70vh; | ||||
|   font-size: 80%; | ||||
|  | ||||
|   @media screen and (min-width: 800px) { | ||||
|     display: flex; | ||||
|     flex-flow: row; | ||||
|     height: 70vh; | ||||
|     #browser-steps-fieldlist { | ||||
|       flex-grow: 0; /* Don't allow it to grow */ | ||||
|       flex-shrink: 0; /* Don't allow it to shrink */ | ||||
|       flex-basis: auto; /* Base width is determined by the content */ | ||||
|       max-width: 400px; /* Set a max width to prevent overflow */ | ||||
|       padding-left: 1rem; | ||||
|       overflow-y: scroll; | ||||
|     } | ||||
|  | ||||
|  | ||||
|     #browser-steps-ui { | ||||
|       flex-grow: 1; /* Allow it to grow and fill the available space */ | ||||
|       flex-shrink: 1; /* Allow it to shrink if needed */ | ||||
|       flex-basis: 0; /* Start with 0 base width so it stretches as much as possible */ | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   #browser-steps-ui { | ||||
|     background-color: #eee; | ||||
|     border-radius: 5px; | ||||
|   } | ||||
|  | ||||
|   #browser-steps-field-list { | ||||
|     text-align: center; | ||||
|   } | ||||
| } | ||||
|  | ||||
| /*  this is duplicate :( */ | ||||
|   | ||||
| @@ -0,0 +1,45 @@ | ||||
| body.preview-text-enabled { | ||||
|   #filters-and-triggers > div { | ||||
|     display: flex; /* Establishes Flexbox layout */ | ||||
|     gap: 20px; /* Adds space between the columns */ | ||||
|     position: relative; /* Ensures the sticky positioning is relative to this parent */ | ||||
|   } | ||||
|  | ||||
|   /* layout of the page */ | ||||
|   #edit-text-filter, #text-preview { | ||||
|     flex: 1; /* Each column takes an equal amount of available space */ | ||||
|     align-self: flex-start; /* Aligns the right column to the start, allowing it to maintain its content height */ | ||||
|   } | ||||
|  | ||||
|   #edit-text-filter { | ||||
|     #pro-tips { | ||||
|       display: none; | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   #text-preview { | ||||
|     position: sticky; | ||||
|     top: 25px; | ||||
|     display: block !important; | ||||
|   } | ||||
|  | ||||
|   /* actual preview area */ | ||||
|   #text-preview-inner { | ||||
|     background: var(--color-grey-900); | ||||
|     border: 1px solid var(--color-grey-600); | ||||
|     padding: 1rem; | ||||
|     color: #333; | ||||
|     font-family: "Courier New", Courier, monospace; /* Sets the font to a monospace type */ | ||||
|     font-size: 12px; | ||||
|     overflow-x: scroll; | ||||
|     white-space: pre-wrap; /* Preserves whitespace and line breaks like <pre> */ | ||||
|     overflow-wrap: break-word; /* Allows long words to break and wrap to the next line */ | ||||
|   } | ||||
| } | ||||
|  | ||||
| #activate-text-preview { | ||||
|   right: 0; | ||||
|   position: absolute; | ||||
|   z-index: 0; | ||||
|   box-shadow: 1px 1px 4px var(--color-shadow-jump); | ||||
| } | ||||
| @@ -1,6 +1,8 @@ | ||||
|  | ||||
| #selector-wrapper { | ||||
|   height: 100%; | ||||
|   text-align: center; | ||||
|    | ||||
|   max-height: 70vh; | ||||
|   overflow-y: scroll; | ||||
|   position: relative; | ||||
|   | ||||
| @@ -12,6 +12,7 @@ | ||||
| @import "parts/_darkmode"; | ||||
| @import "parts/_menu"; | ||||
| @import "parts/_love"; | ||||
| @import "parts/preview_text_filter"; | ||||
|  | ||||
| body { | ||||
|   color: var(--color-text); | ||||
| @@ -186,12 +187,17 @@ code { | ||||
|   } | ||||
| } | ||||
|  | ||||
| .watch-tag-list { | ||||
|   color: var(--color-white); | ||||
| .inline-tag { | ||||
|   white-space: nowrap; | ||||
|   background: var(--color-text-watch-tag-list); | ||||
|   border-radius: 5px; | ||||
|   padding: 2px 5px; | ||||
|   margin-right: 4px; | ||||
| } | ||||
|  | ||||
| .watch-tag-list { | ||||
|   color: var(--color-white); | ||||
|   background: var(--color-text-watch-tag-list); | ||||
|   @extend .inline-tag; | ||||
| } | ||||
|  | ||||
| .box { | ||||
| @@ -671,14 +677,25 @@ footer { | ||||
|   and also iPads specifically. | ||||
|   */ | ||||
|   .watch-table { | ||||
|     /* make headings work on mobile */ | ||||
|     thead { | ||||
|       display: block; | ||||
|       tr { | ||||
|         th { | ||||
|           display: inline-block; | ||||
|         } | ||||
|       } | ||||
|       .empty-cell { | ||||
|         display: none; | ||||
|       } | ||||
|     } | ||||
|  | ||||
|     /* Force table to not be like tables anymore */ | ||||
|     thead, | ||||
|     tbody, | ||||
|     th, | ||||
|     td, | ||||
|     tr { | ||||
|       display: block; | ||||
|     } | ||||
|     tbody { | ||||
|       td, | ||||
|       tr { | ||||
|         display: block; | ||||
|       } | ||||
|     } | ||||
|  | ||||
|     .last-checked { | ||||
| @@ -702,13 +719,6 @@ footer { | ||||
|       display: inline-block; | ||||
|     } | ||||
|  | ||||
|     /* Hide table headers (but not display: none;, for accessibility) */ | ||||
|     thead tr { | ||||
|       position: absolute; | ||||
|       top: -9999px; | ||||
|       left: -9999px; | ||||
|     } | ||||
|  | ||||
|     .pure-table td, | ||||
|     .pure-table th { | ||||
|       border: none; | ||||
| @@ -753,6 +763,7 @@ footer { | ||||
|   thead { | ||||
|     background-color: var(--color-background-table-thead); | ||||
|     color: var(--color-text); | ||||
|     border-bottom: 1px solid var(--color-background-table-thead); | ||||
|   } | ||||
|  | ||||
|   td, | ||||
| @@ -1056,9 +1067,8 @@ ul { | ||||
| .tracking-ldjson-price-data { | ||||
|   background-color: var(--color-background-button-green); | ||||
|   color: #000; | ||||
|   padding: 3px; | ||||
|   border-radius: 3px; | ||||
|   white-space: nowrap; | ||||
|   opacity: 0.6; | ||||
|   @extend .inline-tag; | ||||
| } | ||||
|  | ||||
| .ldjson-price-track-offer { | ||||
| @@ -1104,9 +1114,17 @@ ul { | ||||
|     background-color: var(--color-background-button-cancel); | ||||
|     color: #777; | ||||
|   } | ||||
|   padding: 3px; | ||||
|   border-radius: 3px; | ||||
|   white-space: nowrap; | ||||
|   &.error { | ||||
|     background-color: var(--color-background-button-error); | ||||
|     color: #fff; | ||||
|     opacity: 0.7; | ||||
|   } | ||||
|  | ||||
|   svg { | ||||
|     vertical-align: middle; | ||||
|   } | ||||
|  | ||||
|   @extend .inline-tag; | ||||
| } | ||||
|  | ||||
| #chrome-extension-link { | ||||
|   | ||||
| @@ -46,14 +46,36 @@ | ||||
|     #browser_steps li > label { | ||||
|       display: none; } | ||||
|  | ||||
| #browser-steps-fieldlist { | ||||
|   height: 100%; | ||||
|   overflow-y: scroll; } | ||||
|  | ||||
| #browser-steps .flex-wrapper { | ||||
|   display: flex; | ||||
|   flex-flow: row; | ||||
|   height: 70vh; } | ||||
|   font-size: 80%; } | ||||
|   @media screen and (min-width: 800px) { | ||||
|     #browser-steps .flex-wrapper { | ||||
|       display: flex; | ||||
|       flex-flow: row; | ||||
|       height: 70vh; } | ||||
|       #browser-steps .flex-wrapper #browser-steps-fieldlist { | ||||
|         flex-grow: 0; | ||||
|         /* Don't allow it to grow */ | ||||
|         flex-shrink: 0; | ||||
|         /* Don't allow it to shrink */ | ||||
|         flex-basis: auto; | ||||
|         /* Base width is determined by the content */ | ||||
|         max-width: 400px; | ||||
|         /* Set a max width to prevent overflow */ | ||||
|         padding-left: 1rem; | ||||
|         overflow-y: scroll; } | ||||
|       #browser-steps .flex-wrapper #browser-steps-ui { | ||||
|         flex-grow: 1; | ||||
|         /* Allow it to grow and fill the available space */ | ||||
|         flex-shrink: 1; | ||||
|         /* Allow it to shrink if needed */ | ||||
|         flex-basis: 0; | ||||
|         /* Start with 0 base width so it stretches as much as possible */ } } | ||||
|   #browser-steps .flex-wrapper #browser-steps-ui { | ||||
|     background-color: #eee; | ||||
|     border-radius: 5px; } | ||||
|   #browser-steps .flex-wrapper #browser-steps-field-list { | ||||
|     text-align: center; } | ||||
|  | ||||
| /*  this is duplicate :( */ | ||||
| #browsersteps-selector-wrapper { | ||||
| @@ -411,6 +433,47 @@ html[data-darkmode="true"] #toggle-light-mode .icon-dark { | ||||
|     fill: #ff0000 !important; | ||||
|     transition: all ease 0.3s !important; } | ||||
|  | ||||
| body.preview-text-enabled { | ||||
|   /* layout of the page */ | ||||
|   /* actual preview area */ } | ||||
|   body.preview-text-enabled #filters-and-triggers > div { | ||||
|     display: flex; | ||||
|     /* Establishes Flexbox layout */ | ||||
|     gap: 20px; | ||||
|     /* Adds space between the columns */ | ||||
|     position: relative; | ||||
|     /* Ensures the sticky positioning is relative to this parent */ } | ||||
|   body.preview-text-enabled #edit-text-filter, body.preview-text-enabled #text-preview { | ||||
|     flex: 1; | ||||
|     /* Each column takes an equal amount of available space */ | ||||
|     align-self: flex-start; | ||||
|     /* Aligns the right column to the start, allowing it to maintain its content height */ } | ||||
|   body.preview-text-enabled #edit-text-filter #pro-tips { | ||||
|     display: none; } | ||||
|   body.preview-text-enabled #text-preview { | ||||
|     position: sticky; | ||||
|     top: 25px; | ||||
|     display: block !important; } | ||||
|   body.preview-text-enabled #text-preview-inner { | ||||
|     background: var(--color-grey-900); | ||||
|     border: 1px solid var(--color-grey-600); | ||||
|     padding: 1rem; | ||||
|     color: #333; | ||||
|     font-family: "Courier New", Courier, monospace; | ||||
|     /* Sets the font to a monospace type */ | ||||
|     font-size: 12px; | ||||
|     overflow-x: scroll; | ||||
|     white-space: pre-wrap; | ||||
|     /* Preserves whitespace and line breaks like <pre> */ | ||||
|     overflow-wrap: break-word; | ||||
|     /* Allows long words to break and wrap to the next line */ } | ||||
|  | ||||
| #activate-text-preview { | ||||
|   right: 0; | ||||
|   position: absolute; | ||||
|   z-index: 0; | ||||
|   box-shadow: 1px 1px 4px var(--color-shadow-jump); } | ||||
|  | ||||
| body { | ||||
|   color: var(--color-text); | ||||
|   background: var(--color-background-page); | ||||
| @@ -531,12 +594,15 @@ code { | ||||
|     content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==); | ||||
|     margin: 0 3px 0 5px; } | ||||
|  | ||||
| .inline-tag, .watch-tag-list, .tracking-ldjson-price-data, .restock-label { | ||||
|   white-space: nowrap; | ||||
|   border-radius: 5px; | ||||
|   padding: 2px 5px; | ||||
|   margin-right: 4px; } | ||||
|  | ||||
| .watch-tag-list { | ||||
|   color: var(--color-white); | ||||
|   white-space: nowrap; | ||||
|   background: var(--color-text-watch-tag-list); | ||||
|   border-radius: 5px; | ||||
|   padding: 2px 5px; } | ||||
|   background: var(--color-text-watch-tag-list); } | ||||
|  | ||||
| .box { | ||||
|   max-width: 80%; | ||||
| @@ -863,14 +929,17 @@ footer { | ||||
|   and also iPads specifically. | ||||
|   */ | ||||
|   .watch-table { | ||||
|     /* make headings work on mobile */ | ||||
|     /* Force table to not be like tables anymore */ | ||||
|     /* Force table to not be like tables anymore */ | ||||
|     /* Hide table headers (but not display: none;, for accessibility) */ } | ||||
|     .watch-table thead, | ||||
|     .watch-table tbody, | ||||
|     .watch-table th, | ||||
|     .watch-table td, | ||||
|     .watch-table tr { | ||||
|     /* Force table to not be like tables anymore */ } | ||||
|     .watch-table thead { | ||||
|       display: block; } | ||||
|       .watch-table thead tr th { | ||||
|         display: inline-block; } | ||||
|       .watch-table thead .empty-cell { | ||||
|         display: none; } | ||||
|     .watch-table tbody td, | ||||
|     .watch-table tbody tr { | ||||
|       display: block; } | ||||
|     .watch-table .last-checked > span { | ||||
|       vertical-align: middle; } | ||||
| @@ -882,10 +951,6 @@ footer { | ||||
|       content: "Last Changed "; } | ||||
|     .watch-table td.inline { | ||||
|       display: inline-block; } | ||||
|     .watch-table thead tr { | ||||
|       position: absolute; | ||||
|       top: -9999px; | ||||
|       left: -9999px; } | ||||
|     .watch-table .pure-table td, | ||||
|     .watch-table .pure-table th { | ||||
|       border: none; } | ||||
| @@ -912,7 +977,8 @@ footer { | ||||
|   border-color: var(--color-border-table-cell); } | ||||
|   .pure-table thead { | ||||
|     background-color: var(--color-background-table-thead); | ||||
|     color: var(--color-text); } | ||||
|     color: var(--color-text); | ||||
|     border-bottom: 1px solid var(--color-background-table-thead); } | ||||
|   .pure-table td, | ||||
|   .pure-table th { | ||||
|     border-left-color: var(--color-border-table-cell); } | ||||
| @@ -1065,6 +1131,7 @@ ul { | ||||
|  | ||||
| #selector-wrapper { | ||||
|   height: 100%; | ||||
|   text-align: center; | ||||
|   max-height: 70vh; | ||||
|   overflow-y: scroll; | ||||
|   position: relative; } | ||||
| @@ -1152,9 +1219,7 @@ ul { | ||||
| .tracking-ldjson-price-data { | ||||
|   background-color: var(--color-background-button-green); | ||||
|   color: #000; | ||||
|   padding: 3px; | ||||
|   border-radius: 3px; | ||||
|   white-space: nowrap; } | ||||
|   opacity: 0.6; } | ||||
|  | ||||
| .ldjson-price-track-offer { | ||||
|   font-weight: bold; | ||||
| @@ -1179,16 +1244,21 @@ ul { | ||||
|       #quick-watch-processor-type ul li > * { | ||||
|         display: inline-block; } | ||||
|  | ||||
| .restock-label { | ||||
|   padding: 3px; | ||||
|   border-radius: 3px; | ||||
|   white-space: nowrap; } | ||||
|   .restock-label.in-stock { | ||||
|     background-color: var(--color-background-button-green); | ||||
|     color: #fff; } | ||||
|   .restock-label.not-in-stock { | ||||
|     background-color: var(--color-background-button-cancel); | ||||
|     color: #777; } | ||||
| .restock-label.in-stock { | ||||
|   background-color: var(--color-background-button-green); | ||||
|   color: #fff; } | ||||
|  | ||||
| .restock-label.not-in-stock { | ||||
|   background-color: var(--color-background-button-cancel); | ||||
|   color: #777; } | ||||
|  | ||||
| .restock-label.error { | ||||
|   background-color: var(--color-background-button-error); | ||||
|   color: #fff; | ||||
|   opacity: 0.7; } | ||||
|  | ||||
| .restock-label svg { | ||||
|   vertical-align: middle; } | ||||
|  | ||||
| #chrome-extension-link { | ||||
|   padding: 9px; | ||||
|   | ||||
| @@ -11,13 +11,15 @@ from threading import Lock | ||||
| import json | ||||
| import os | ||||
| import re | ||||
| import requests | ||||
| import secrets | ||||
| import threading | ||||
| import time | ||||
| import uuid as uuid_builder | ||||
| from loguru import logger | ||||
|  | ||||
| from .processors import get_custom_watch_obj_for_processor | ||||
| from .processors.restock_diff import Restock | ||||
|  | ||||
| # Because the server will run as a daemon and won't know the URL for notification links when firing off a notification | ||||
| BASE_URL_NOT_SET_TEXT = '("Base URL" not set - see settings - notifications)' | ||||
|  | ||||
| @@ -81,9 +83,13 @@ class ChangeDetectionStore: | ||||
|  | ||||
|                 # Convert each existing watch back to the Watch.model object | ||||
|                 for uuid, watch in self.__data['watching'].items(): | ||||
|                     watch['uuid']=uuid | ||||
|                     self.__data['watching'][uuid] = Watch.model(datastore_path=self.datastore_path, default=watch) | ||||
|                     logger.info(f"Watching: {uuid} {self.__data['watching'][uuid]['url']}") | ||||
|                     self.__data['watching'][uuid] = self.rehydrate_entity(uuid, watch) | ||||
|                     logger.info(f"Watching: {uuid} {watch['url']}") | ||||
|  | ||||
|                 # And for Tags also, should be Restock type because it has extra settings | ||||
|                 for uuid, tag in self.__data['settings']['application']['tags'].items(): | ||||
|                     self.__data['settings']['application']['tags'][uuid] = self.rehydrate_entity(uuid, tag, processor_override='restock_diff') | ||||
|                     logger.info(f"Tag: {uuid} {tag['title']}") | ||||
|  | ||||
|         # First time ran, Create the datastore. | ||||
|         except (FileNotFoundError): | ||||
| @@ -138,6 +144,22 @@ class ChangeDetectionStore: | ||||
|         # Finally start the thread that will manage periodic data saves to JSON | ||||
|         save_data_thread = threading.Thread(target=self.save_datastore).start() | ||||
|  | ||||
|     def rehydrate_entity(self, uuid, entity, processor_override=None): | ||||
|         """Set the dict back to the dict Watch object""" | ||||
|         entity['uuid'] = uuid | ||||
|  | ||||
|         if processor_override: | ||||
|             watch_class = get_custom_watch_obj_for_processor(processor_override) | ||||
|             entity['processor']=processor_override | ||||
|         else: | ||||
|             watch_class = get_custom_watch_obj_for_processor(entity.get('processor')) | ||||
|  | ||||
|         if entity.get('processor') != 'text_json_diff': | ||||
|             logger.trace(f"Loading Watch object '{watch_class.__module__}.{watch_class.__name__}' for UUID {uuid}") | ||||
|  | ||||
|         entity = watch_class(datastore_path=self.datastore_path, default=entity) | ||||
|         return entity | ||||
|  | ||||
|     def set_last_viewed(self, uuid, timestamp): | ||||
|         logger.debug(f"Setting watch UUID: {uuid} last viewed to {int(timestamp)}") | ||||
|         self.data['watching'][uuid].update({'last_viewed': int(timestamp)}) | ||||
| @@ -163,7 +185,6 @@ class ChangeDetectionStore: | ||||
|                         del (update_obj[dict_key]) | ||||
|  | ||||
|             self.__data['watching'][uuid].update(update_obj) | ||||
|  | ||||
|         self.needs_write = True | ||||
|  | ||||
|     @property | ||||
| @@ -177,6 +198,9 @@ class ChangeDetectionStore: | ||||
|  | ||||
|     @property | ||||
|     def has_unviewed(self): | ||||
|         if not self.__data.get('watching'): | ||||
|             return None | ||||
|  | ||||
|         for uuid, watch in self.__data['watching'].items(): | ||||
|             if watch.history_n >= 2 and watch.viewed == False: | ||||
|                 return True | ||||
| @@ -241,34 +265,11 @@ class ChangeDetectionStore: | ||||
|  | ||||
|     # Remove a watch's data but keep the entry (URL etc) | ||||
|     def clear_watch_history(self, uuid): | ||||
|         import pathlib | ||||
|  | ||||
|         self.__data['watching'][uuid].update({ | ||||
|                 'browser_steps_last_error_step' : None, | ||||
|                 'check_count': 0, | ||||
|                 'fetch_time' : 0.0, | ||||
|                 'has_ldjson_price_data': None, | ||||
|                 'in_stock': None, | ||||
|                 'last_checked': 0, | ||||
|                 'last_error': False, | ||||
|                 'last_notification_error': False, | ||||
|                 'last_viewed': 0, | ||||
|                 'previous_md5': False, | ||||
|                 'previous_md5_before_filters': False, | ||||
|                 'remote_server_reply': None, | ||||
|                 'track_ldjson_price_data': None, | ||||
|             }) | ||||
|  | ||||
|         # JSON Data, Screenshots, Textfiles (history index and snapshots), HTML in the future etc | ||||
|         for item in pathlib.Path(os.path.join(self.datastore_path, uuid)).rglob("*.*"): | ||||
|             unlink(item) | ||||
|  | ||||
|         # Force the attr to recalculate | ||||
|         bump = self.__data['watching'][uuid].history | ||||
|  | ||||
|         self.__data['watching'][uuid].clear_watch() | ||||
|         self.needs_write_urgent = True | ||||
|  | ||||
|     def add_watch(self, url, tag='', extras=None, tag_uuids=None, write_to_disk_now=True): | ||||
|         import requests | ||||
|  | ||||
|         if extras is None: | ||||
|             extras = {} | ||||
| @@ -342,11 +343,13 @@ class ChangeDetectionStore: | ||||
|         if apply_extras.get('tags'): | ||||
|             apply_extras['tags'] = list(set(apply_extras.get('tags'))) | ||||
|  | ||||
|         new_watch = Watch.model(datastore_path=self.datastore_path, url=url) | ||||
|         # If the processor also has its own Watch implementation | ||||
|         watch_class = get_custom_watch_obj_for_processor(apply_extras.get('processor')) | ||||
|         new_watch = watch_class(datastore_path=self.datastore_path, url=url) | ||||
|  | ||||
|         new_uuid = new_watch.get('uuid') | ||||
|  | ||||
|         logger.debug(f"Adding URL {url} - {new_uuid}") | ||||
|         logger.debug(f"Adding URL '{url}' - {new_uuid}") | ||||
|  | ||||
|         for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']: | ||||
|             if k in apply_extras: | ||||
| @@ -376,46 +379,6 @@ class ChangeDetectionStore: | ||||
|  | ||||
|         return False | ||||
|  | ||||
|     # Save as PNG, PNG is larger but better for doing visual diff in the future | ||||
|     def save_screenshot(self, watch_uuid, screenshot: bytes, as_error=False): | ||||
|         if not self.data['watching'].get(watch_uuid): | ||||
|             return | ||||
|  | ||||
|         if as_error: | ||||
|             target_path = os.path.join(self.datastore_path, watch_uuid, "last-error-screenshot.png") | ||||
|         else: | ||||
|             target_path = os.path.join(self.datastore_path, watch_uuid, "last-screenshot.png") | ||||
|  | ||||
|         self.data['watching'][watch_uuid].ensure_data_dir_exists() | ||||
|  | ||||
|         with open(target_path, 'wb') as f: | ||||
|             f.write(screenshot) | ||||
|             f.close() | ||||
|  | ||||
|  | ||||
|     def save_error_text(self, watch_uuid, contents): | ||||
|         if not self.data['watching'].get(watch_uuid): | ||||
|             return | ||||
|  | ||||
|         self.data['watching'][watch_uuid].ensure_data_dir_exists() | ||||
|         target_path = os.path.join(self.datastore_path, watch_uuid, "last-error.txt") | ||||
|         with open(target_path, 'w') as f: | ||||
|             f.write(contents) | ||||
|  | ||||
|     def save_xpath_data(self, watch_uuid, data, as_error=False): | ||||
|  | ||||
|         if not self.data['watching'].get(watch_uuid): | ||||
|             return | ||||
|         if as_error: | ||||
|             target_path = os.path.join(self.datastore_path, watch_uuid, "elements-error.json") | ||||
|         else: | ||||
|             target_path = os.path.join(self.datastore_path, watch_uuid, "elements.json") | ||||
|         self.data['watching'][watch_uuid].ensure_data_dir_exists() | ||||
|         with open(target_path, 'w') as f: | ||||
|             f.write(json.dumps(data)) | ||||
|             f.close() | ||||
|  | ||||
|  | ||||
|     def sync_to_json(self): | ||||
|         logger.info("Saving JSON..") | ||||
|         try: | ||||
| @@ -622,7 +585,8 @@ class ChangeDetectionStore: | ||||
|         # Eventually almost everything todo with a watch will apply as a Tag | ||||
|         # So we use the same model as a Watch | ||||
|         with self.lock: | ||||
|             new_tag = Watch.model(datastore_path=self.datastore_path, default={ | ||||
|             from .model import Tag | ||||
|             new_tag = Tag.model(datastore_path=self.datastore_path, default={ | ||||
|                 'title': name.strip(), | ||||
|                 'date_created': int(time.time()) | ||||
|             }) | ||||
| @@ -661,6 +625,39 @@ class ChangeDetectionStore: | ||||
|         return next((v for v in tags if v.get('title', '').lower() == tag_name.lower()), | ||||
|                     None) | ||||
|  | ||||
|     def any_watches_have_processor_by_name(self, processor_name): | ||||
|         for watch in self.data['watching'].values(): | ||||
|             if watch.get('processor') == processor_name: | ||||
|                 return True | ||||
|         return False | ||||
|  | ||||
|     def get_unique_notification_tokens_available(self): | ||||
|         # Ask each type of watch if they have any extra notification token to add to the validation | ||||
|         extra_notification_tokens = {} | ||||
|         watch_processors_checked = set() | ||||
|  | ||||
|         for watch_uuid, watch in self.__data['watching'].items(): | ||||
|             processor = watch.get('processor') | ||||
|             if processor not in watch_processors_checked: | ||||
|                 extra_notification_tokens.update(watch.extra_notification_token_values()) | ||||
|                 watch_processors_checked.add(processor) | ||||
|  | ||||
|         return extra_notification_tokens | ||||
|  | ||||
|     def get_unique_notification_token_placeholders_available(self): | ||||
|         # The actual description of the tokens, could be combined with get_unique_notification_tokens_available instead of doing this twice | ||||
|         extra_notification_tokens = [] | ||||
|         watch_processors_checked = set() | ||||
|  | ||||
|         for watch_uuid, watch in self.__data['watching'].items(): | ||||
|             processor = watch.get('processor') | ||||
|             if processor not in watch_processors_checked: | ||||
|                 extra_notification_tokens+=watch.extra_notification_token_placeholder_info() | ||||
|                 watch_processors_checked.add(processor) | ||||
|  | ||||
|         return extra_notification_tokens | ||||
|  | ||||
|  | ||||
|     def get_updates_available(self): | ||||
|         import inspect | ||||
|         updates_available = [] | ||||
| @@ -884,3 +881,30 @@ class ChangeDetectionStore: | ||||
|                 # Something custom here | ||||
|                 self.__data["watching"][uuid]['time_between_check_use_default'] = False | ||||
|  | ||||
|     # Correctly set datatype for older installs where 'tag' was string and update_12 did not catch it | ||||
|     def update_16(self): | ||||
|         for uuid, watch in self.data['watching'].items(): | ||||
|             if isinstance(watch.get('tags'), str): | ||||
|                 self.data['watching'][uuid]['tags'] = [] | ||||
|  | ||||
|     # Migrate old 'in_stock' values to the new Restock | ||||
|     def update_17(self): | ||||
|         for uuid, watch in self.data['watching'].items(): | ||||
|             if 'in_stock' in watch: | ||||
|                 watch['restock'] = Restock({'in_stock': watch.get('in_stock')}) | ||||
|                 del watch['in_stock'] | ||||
|  | ||||
|     # Migrate old restock settings | ||||
|     def update_18(self): | ||||
|         for uuid, watch in self.data['watching'].items(): | ||||
|             if not watch.get('restock_settings'): | ||||
|                 # So we enable price following by default | ||||
|                 self.data['watching'][uuid]['restock_settings'] = {'follow_price_changes': True} | ||||
|  | ||||
|             # Migrate and clean off the old value | ||||
|             self.data['watching'][uuid]['restock_settings']['in_stock_processing'] = 'in_stock_only' if watch.get( | ||||
|                 'in_stock_only') else 'all_changes' | ||||
|  | ||||
|             if self.data['watching'][uuid].get('in_stock_only'): | ||||
|                 del (self.data['watching'][uuid]['in_stock_only']) | ||||
|  | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
|  | ||||
| {% from '_helpers.html' import render_field %} | ||||
|  | ||||
| {% macro render_common_settings_form(form, emailprefix, settings_application) %} | ||||
| {% macro render_common_settings_form(form, emailprefix, settings_application, extra_notification_token_placeholder_info) %} | ||||
|                         <div class="pure-control-group"> | ||||
|                             {{ render_field(form.notification_urls, rows=5, placeholder="Examples: | ||||
|     Gitter - gitter://token/room | ||||
| @@ -11,8 +11,11 @@ | ||||
|     class="notification-urls" ) | ||||
|                             }} | ||||
|                             <div class="pure-form-message-inline"> | ||||
|                               <ul> | ||||
|                                 <li>Use <a target=_new href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.</li> | ||||
|                                 <p> | ||||
|                                 <strong>Tip:</strong> Use <a target=_new href="https://github.com/caronc/apprise">AppRise Notification URLs</a> for notification to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.<br> | ||||
| </p> | ||||
|                                 <div data-target="#advanced-help-notifications" class="toggle-show pure-button button-tag button-xsmall">Show advanced help and tips</div> | ||||
|                                 <ul style="display: none" id="advanced-help-notifications"> | ||||
|                                 <li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_discord">discord://</a></code> (or <code>https://discord.com/api/webhooks...</code>) only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li> | ||||
|                                 <li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> bots can't send messages to other bots, so you should specify chat ID of non-bot user.</li> | ||||
|                                 <li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li> | ||||
| @@ -40,7 +43,7 @@ | ||||
|  | ||||
|                             </div> | ||||
|                             <div class="pure-controls"> | ||||
|                                 <div id="notification-token-toggle" class="pure-button button-tag button-xsmall">Show token/placeholders</div> | ||||
|                                 <div data-target="#notification-tokens-info" class="toggle-show pure-button button-tag button-xsmall">Show token/placeholders</div> | ||||
|                             </div> | ||||
|                             <div class="pure-controls" style="display: none;" id="notification-tokens-info"> | ||||
|                                 <table class="pure-table" id="token-table"> | ||||
| @@ -107,7 +110,15 @@ | ||||
|                                     <tr> | ||||
|                                         <td><code>{{ '{{triggered_text}}' }}</code></td> | ||||
|                                         <td>Text that tripped the trigger from filters</td> | ||||
|                                     </tr> | ||||
|  | ||||
|                                         {% if extra_notification_token_placeholder_info %} | ||||
|                                             {% for token in extra_notification_token_placeholder_info %} | ||||
|                                                 <tr> | ||||
|                                                     <td><code>{{ '{{' }}{{ token[0] }}{{ '}}' }}</code></td> | ||||
|                                                     <td>{{ token[1] }}</td> | ||||
|                                                 </tr> | ||||
|                                             {% endfor %} | ||||
|                                         {% endif %} | ||||
|                                     </tbody> | ||||
|                                 </table> | ||||
|                                 <div class="pure-form-message-inline"> | ||||
|   | ||||
| @@ -26,10 +26,14 @@ | ||||
|     <meta name="msapplication-TileColor" content="#da532c"> | ||||
|     <meta name="msapplication-config" content="favicons/browserconfig.xml"> | ||||
|     <meta name="theme-color" content="#ffffff"> | ||||
|     <script> | ||||
|         const csrftoken="{{ csrf_token() }}"; | ||||
|     </script> | ||||
|     <script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script> | ||||
|     <script src="{{url_for('static_content', group='js', filename='csrf.js')}}" defer></script> | ||||
|   </head> | ||||
|  | ||||
|   <body> | ||||
|   <body class=""> | ||||
|     <div class="header"> | ||||
|       <div class="home-menu pure-menu pure-menu-horizontal pure-menu-fixed" id="nav-menu"> | ||||
|         {% if has_password and not current_user.is_authenticated %} | ||||
|   | ||||
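The template change above exposes `csrftoken` as a global in the page head and loads `csrf.js` alongside jQuery. As a plausible sketch of what such a helper does (the header name and hook are assumptions, not read from the repository's csrf.js), it would attach the Flask-WTF token to mutating jQuery requests:

    // Illustrative only: send the CSRF token with every non-GET jQuery request.
    $.ajaxSetup({
        beforeSend: function (xhr, settings) {
            if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type)) {
                xhr.setRequestHeader('X-CSRFToken', csrftoken);   // header name is an assumption
            }
        }
    });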
| @@ -4,6 +4,7 @@ | ||||
| {% from '_common_fields.html' import render_common_settings_form %} | ||||
| <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='global-settings.js')}}" defer></script> | ||||
| <script> | ||||
|     const browser_steps_available_screenshots=JSON.parse('{{ watch.get_browsersteps_available_screenshots|tojson }}'); | ||||
|     const browser_steps_config=JSON.parse('{{ browser_steps_config|tojson }}'); | ||||
| @@ -16,7 +17,7 @@ | ||||
|     const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}'); | ||||
| {% endif %} | ||||
|     const notification_base_url="{{url_for('ajax_callback_send_notification_test', watch_uuid=uuid)}}"; | ||||
|     const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %}; | ||||
|     const playwright_enabled={% if playwright_enabled %}true{% else %}false{% endif %}; | ||||
|     const recheck_proxy_start_url="{{url_for('check_proxies.start_check', uuid=uuid)}}"; | ||||
|     const proxy_recheck_status_url="{{url_for('check_proxies.get_recheck_status', uuid=uuid)}}"; | ||||
|     const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}"; | ||||
| @@ -41,17 +42,15 @@ | ||||
|         <ul> | ||||
|             <li class="tab" id=""><a href="#general">General</a></li> | ||||
|             <li class="tab"><a href="#request">Request</a></li> | ||||
|             {% if extra_tab_content %} | ||||
|             <li class="tab"><a href="#extras_tab">{{ extra_tab_content }}</a></li> | ||||
|             {% endif %} | ||||
|             {% if playwright_enabled %} | ||||
|             <li class="tab"><a id="browsersteps-tab" href="#browser-steps">Browser Steps</a></li> | ||||
|             {% endif %} | ||||
|  | ||||
|             {% if watch['processor'] == 'text_json_diff' %} | ||||
|             <li class="tab"><a id="visualselector-tab" href="#visualselector">Visual Filter Selector</a></li> | ||||
|             <li class="tab"><a href="#filters-and-triggers">Filters & Triggers</a></li> | ||||
|             {% endif %} | ||||
|  | ||||
|             {% if watch['processor'] == 'restock_diff' %} | ||||
|             <li class="tab"><a href="#restock">Restock Detection</a></li> | ||||
|             <li class="tab" id="filters-and-triggers-tab"><a href="#filters-and-triggers">Filters & Triggers</a></li> | ||||
|             {% endif %} | ||||
|             <li class="tab"><a href="#notifications">Notifications</a></li> | ||||
|             <li class="tab"><a href="#stats">Stats</a></li> | ||||
| @@ -69,16 +68,9 @@ | ||||
|                         {{ render_field(form.url, placeholder="https://...", required=true, class="m-d") }} | ||||
|                         <span class="pure-form-message-inline">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></span><br> | ||||
|                         <span class="pure-form-message-inline">You can use variables in the URL, perfect for inserting the current date and other logic, <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a></span><br> | ||||
|                         <span class="pure-form-message-inline"> | ||||
|                         {% if watch['processor'] == 'text_json_diff' %} | ||||
|                             Current mode: <strong>Webpage Text/HTML, JSON and PDF changes.</strong><br> | ||||
|                           <a href="{{url_for('edit_page', uuid=uuid)}}?switch_processor=restock_diff" class="pure-button button-xsmall">Switch to re-stock detection mode.</a> | ||||
|                         {% else %} | ||||
|                         Current mode: <strong>Re-stock detection.</strong><br> | ||||
|                           <a href="{{url_for('edit_page', uuid=uuid)}}?switch_processor=text_json_diff" class="pure-button button-xsmall">Switch to Webpage Text/HTML, JSON and PDF changes mode.</a> | ||||
|                         {% endif %} | ||||
|                         </span> | ||||
|  | ||||
|                     </div> | ||||
|                     <div class="pure-control-group inline-radio"> | ||||
|                         {{ render_field(form.processor) }} | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.title, class="m-d") }} | ||||
| @@ -208,7 +200,7 @@ User-Agent: wonderbra 1.0") }} | ||||
|                         <div id="loading-status-text" style="display: none;">Please wait, first browser step can take a little time to load..<div class="spinner"></div></div> | ||||
|                         <div class="flex-wrapper" > | ||||
|  | ||||
|                             <div id="browser-steps-ui" class="noselect"  style="width: 100%; background-color: #eee; border-radius: 5px;"> | ||||
|                             <div id="browser-steps-ui" class="noselect"> | ||||
|  | ||||
|                                 <div class="noselect"  id="browsersteps-selector-wrapper" style="width: 100%"> | ||||
|                                     <span class="loader" > | ||||
| @@ -223,7 +215,7 @@ User-Agent: wonderbra 1.0") }} | ||||
|                                     <canvas  class="noselect" id="browsersteps-selector-canvas" style="max-width: 100%; width: 100%;"></canvas> | ||||
|                                 </div> | ||||
|                             </div> | ||||
|                             <div id="browser-steps-fieldlist" style="padding-left: 1em;  width: 350px; font-size: 80%;" > | ||||
|                             <div id="browser-steps-fieldlist" > | ||||
|                                 <span id="browser-seconds-remaining">Loading</span> <span style="font-size: 80%;"> (<a target=_new href="https://github.com/dgtlmoon/changedetection.io/pull/478/files#diff-1a79d924d1840c485238e66772391268a89c95b781d69091384cf1ea1ac146c9R4">?</a>) </span> | ||||
|                                 {{ render_field(form.browser_steps) }} | ||||
|                             </div> | ||||
| @@ -255,14 +247,17 @@ User-Agent: wonderbra 1.0") }} | ||||
|                         {% endif %} | ||||
|                         <a href="#notifications" id="notification-setting-reset-to-default" class="pure-button button-xsmall" style="right: 20px; top: 20px; position: absolute; background-color: #5f42dd; border-radius: 4px; font-size: 70%; color: #fff">Use system defaults</a> | ||||
|  | ||||
|                         {{ render_common_settings_form(form, emailprefix, settings_application) }} | ||||
|                         {{ render_common_settings_form(form, emailprefix, settings_application, extra_notification_token_placeholder_info) }} | ||||
|                     </div> | ||||
|                 </fieldset> | ||||
|             </div> | ||||
|  | ||||
|             {% if watch['processor'] == 'text_json_diff' %} | ||||
|             <div class="tab-pane-inner" id="filters-and-triggers"> | ||||
|                     <div class="pure-control-group"> | ||||
|                 <span id="activate-text-preview" class="pure-button pure-button-primary button-xsmall">Activate preview</span> | ||||
|               <div> | ||||
|               <div id="edit-text-filter"> | ||||
|                     <div class="pure-control-group" id="pro-tips"> | ||||
|                             <strong>Pro-tips:</strong><br> | ||||
|                             <ul> | ||||
|                                 <li> | ||||
| @@ -284,15 +279,15 @@ xpath://body/div/span[contains(@class, 'example-class')]", | ||||
|                         {% if '/text()' in  field %} | ||||
|                           <span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the <element> contains <![CDATA[]]></strong></span><br> | ||||
|                         {% endif %} | ||||
|                         <span class="pure-form-message-inline">One rule per line, <i>any</i> rules that matches will be used.<br> | ||||
|  | ||||
|                     <ul> | ||||
|                         <span class="pure-form-message-inline">One CSS, xPath, JSON Path/JQ selector per line, <i>any</i> rule that matches will be used.<br> | ||||
| <p><div data-target="#advanced-help-selectors" class="toggle-show pure-button button-tag button-xsmall">Show advanced help and tips</div><br></p> | ||||
|                     <ul id="advanced-help-selectors" style="display: none;"> | ||||
|                         <li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li> | ||||
|                         <li>JSON - Limit text to this JSON rule, using either <a href="https://pypi.org/project/jsonpath-ng/" target="new">JSONPath</a> or <a href="https://stedolan.github.io/jq/" target="new">jq</a> (if installed). | ||||
|                             <ul> | ||||
|                                 <li>JSONPath: Prefix with <code>json:</code>, use <code>json:$</code> to force re-formatting if required,  <a href="https://jsonpath.com/" target="new">test your JSONPath here</a>.</li> | ||||
|                                 {% if jq_support %} | ||||
|                                 <li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>.</li> | ||||
|                                 <li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>. The <code>jqraw:</code> prefix outputs the results as text instead of a JSON list.</li> | ||||
|                                 {% else %} | ||||
|                                 <li>jq support not installed</li> | ||||
|                                 {% endif %} | ||||
| @@ -306,21 +301,25 @@ xpath://body/div/span[contains(@class, 'example-class')]", | ||||
|                                 <li>To use XPath1.0: Prefix with <code>xpath1:</code></li> | ||||
|                             </ul> | ||||
|                             </li> | ||||
|                     </ul> | ||||
|                     Please be sure that you thoroughly understand how to write CSS, JSONPath, XPath{% if jq_support %}, or jq selector{%endif%} rules before filing an issue on GitHub! <a | ||||
|                     <li> | ||||
|                         Please be sure that you thoroughly understand how to write CSS, JSONPath, XPath{% if jq_support %}, or jq selector{%endif%} rules before filing an issue on GitHub! <a | ||||
|                                 href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">See here for more CSS selector help</a>.<br> | ||||
|                     </li> | ||||
|                     </ul> | ||||
|  | ||||
|                 </span> | ||||
|                     </div> | ||||
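The prefix rules listed above map onto ordinary Python libraries. Below is a minimal sketch of how the JSON-related prefixes (`json:`, `jq:`, `jqraw:`) could be evaluated using the jsonpath-ng and jq packages that the help text links to; it illustrates the semantics only and is not changedetection.io's own filter code, and the `apply_rule` helper is hypothetical.

```python
import json

from jsonpath_ng.ext import parse as jsonpath_parse  # pip install jsonpath-ng

try:
    import jq  # optional: pip install jq
except ImportError:
    jq = None

document = json.loads('{"offers": [{"price": 8097000, "priceCurrency": "IDR"}]}')

def apply_rule(rule, data):
    # Hypothetical helper mirroring the json:/jq:/jqraw: prefixes described above
    if rule.startswith("json:"):
        matches = jsonpath_parse(rule[len("json:"):]).find(data)
        return json.dumps([m.value for m in matches])          # JSON output, like json:
    if rule.startswith("jqraw:") and jq:
        results = jq.compile(rule[len("jqraw:"):]).input(data).all()
        return "\n".join(str(r) for r in results)               # plain text, like jqraw:
    if rule.startswith("jq:") and jq:
        return json.dumps(jq.compile(rule[len("jq:"):]).input(data).all())
    raise ValueError(f"Unsupported or unavailable rule: {rule}")

print(apply_rule("json:$.offers[*].price", document))   # [8097000]
if jq:
    print(apply_rule("jq:.offers[].price", document))    # [8097000]
```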
|                 <fieldset class="pure-control-group"> | ||||
|                     {{ render_field(form.subtractive_selectors, rows=5, placeholder=has_tag_filters_extra+"header | ||||
| footer | ||||
| nav | ||||
| .stockticker") }} | ||||
| .stockticker | ||||
| //*[contains(text(), 'Advertisement')]") }} | ||||
|                     <span class="pure-form-message-inline"> | ||||
|                         <ul> | ||||
|                           <li> Remove HTML element(s) by CSS selector before text conversion. </li> | ||||
|                           <li> Don't paste HTML here, use only CSS selectors </li> | ||||
|                           <li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li> | ||||
|                           <li> Remove HTML element(s) by CSS and XPath selectors before text conversion. </li> | ||||
|                           <li> Don't paste HTML here, use only CSS and XPath selectors </li> | ||||
|                           <li> Add multiple elements, CSS or XPath selectors per line to ignore multiple parts of the HTML. </li> | ||||
|                         </ul> | ||||
|                       </span> | ||||
|                 </fieldset> | ||||
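The subtractive selectors above accept both plain CSS and XPath (including the bare `//...` form added to the placeholder). The project's own element removal helper, exercised in a test further down in this diff, is not reproduced here; a rough lxml-based equivalent, assuming the cssselect package is installed, could look like this:

```python
from lxml import html  # pip install lxml cssselect

def remove_elements(content, selectors):
    # Illustrative only: drop matched elements before text extraction
    tree = html.fromstring(content)
    for selector in selectors:
        if selector.startswith("//") or selector.startswith("xpath:"):
            matches = tree.xpath(selector.removeprefix("xpath:"))  # XPath rule
        else:
            matches = tree.cssselect(selector)                     # plain lines treated as CSS
        for element in matches:
            parent = element.getparent()
            if parent is not None:
                parent.remove(element)
    return html.tostring(tree, encoding="unicode")

print(remove_elements(
    "<body><nav>menu</nav><p>keep me</p><div>Advertisement here</div></body>",
    ["nav", "//*[contains(text(), 'Advertisement')]"],
))
# -> the <nav> and the Advertisement <div> are gone, "keep me" remains
```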
| @@ -335,14 +334,21 @@ nav | ||||
|                     <span class="pure-form-message-inline">So it's always better to select <strong>Added</strong>+<strong>Replaced</strong> when you're interested in new content.</span><br> | ||||
|                     <span class="pure-form-message-inline">When content is merely moved in a list, it will also trigger an <strong>addition</strong>; consider enabling <code><strong>Only trigger when unique lines appear</strong></code></span> | ||||
|                 </fieldset> | ||||
|  | ||||
|                 <fieldset class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.check_unique_lines) }} | ||||
|                     <span class="pure-form-message-inline">Good for websites that just move the content around but where you want to know when NEW content is added; compares new lines against all history for this watch.</span> | ||||
|                 </fieldset> | ||||
|                 <fieldset class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.remove_duplicate_lines) }} | ||||
|                     <span class="pure-form-message-inline">Remove duplicate lines of text</span> | ||||
|                 </fieldset> | ||||
|                 <fieldset class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.sort_text_alphabetically) }} | ||||
|                     <span class="pure-form-message-inline">Helps reduce detected changes caused by sites shuffling lines around; combine with <i>check unique lines</i> below.</span> | ||||
|                 </fieldset> | ||||
|                 <fieldset class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.check_unique_lines) }} | ||||
|                     <span class="pure-form-message-inline">Good for websites that just move the content around but where you want to know when NEW content is added; compares new lines against all history for this watch.</span> | ||||
|                     {{ render_checkbox_field(form.trim_text_whitespace) }} | ||||
|                     <span class="pure-form-message-inline">Remove any whitespace before and after each line of text</span> | ||||
|                 </fieldset> | ||||
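Taken together, the checkboxes above describe a small line-level pipeline: trim whitespace, drop duplicates, sort alphabetically, and only then compare against lines already seen in the watch history. The sketch below shows how those options interact; it is purely illustrative and is not the processor code touched by this diff.

```python
def postprocess_lines(text, *, trim=False, dedupe=False, sort_alpha=False, history_lines=None):
    # Illustrative combination of the line-handling options above
    lines = text.splitlines()
    if trim:                       # "Remove any whitespace before and after each line"
        lines = [line.strip() for line in lines]
    if dedupe:                     # "Remove duplicate lines of text"
        lines = list(dict.fromkeys(lines))   # keeps first occurrence, preserves order
    if sort_alpha:                 # sort text alphabetically
        lines = sorted(lines, key=str.lower)
    if history_lines is not None:  # "Only trigger when unique lines appear"
        lines = [line for line in lines if line not in history_lines]
    return "\n".join(lines)

print(postprocess_lines("  b \na\nb", trim=True, dedupe=True, sort_alpha=True,
                        history_lines={"a"}))   # -> "b"
```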
|                 <fieldset> | ||||
|                     <div class="pure-control-group"> | ||||
| @@ -412,19 +418,25 @@ Unavailable") }} | ||||
|                 </fieldset> | ||||
|                 </div> | ||||
|             </div> | ||||
|             {% endif %} | ||||
|  | ||||
|             {% if watch['processor'] == 'restock_diff' %} | ||||
|             <div class="tab-pane-inner" id="restock"> | ||||
|                     <fieldset> | ||||
|                         <div class="pure-control-group"> | ||||
|                             {{ render_checkbox_field(form.in_stock_only) }} | ||||
|                             <span class="pure-form-message-inline">Only trigger notifications when page changes from <strong>out of stock</strong> to <strong>back in stock</strong></span> | ||||
|                         </div> | ||||
|                     </fieldset> | ||||
|               <div id="text-preview" style="display: none;" > | ||||
|                     <script> | ||||
|                         const preview_text_edit_filters_url="{{url_for('watch_get_preview_rendered', uuid=uuid)}}"; | ||||
|                     </script> | ||||
|                     <span><strong>Preview of the text that is used for change detection after all filters run.</strong></span><br> | ||||
|                     {#<div id="text-preview-controls"><span id="text-preview-refresh" class="pure-button button-xsmall">Refresh</span></div>#} | ||||
|                 <p> | ||||
|                     <div id="text-preview-inner"></div> | ||||
|                 </p> | ||||
|             </div> | ||||
|             {% endif %} | ||||
|  | ||||
|           </div> | ||||
|         </div> | ||||
|         {% endif %} | ||||
|         {# rendered sub Template #} | ||||
|         {% if extra_form_content %} | ||||
|             <div class="tab-pane-inner" id="extras_tab"> | ||||
|             {{ extra_form_content|safe }} | ||||
|             </div> | ||||
|         {% endif %} | ||||
|             {% if watch['processor'] == 'text_json_diff' %} | ||||
|             <div class="tab-pane-inner visual-selector-ui" id="visualselector"> | ||||
|                 <img class="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}" alt="New beta functionality"> | ||||
| @@ -432,9 +444,8 @@ Unavailable") }} | ||||
|                 <fieldset> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {% if visualselector_enabled %} | ||||
|                             <span class="pure-form-message-inline"> | ||||
|                                 The Visual Selector tool lets you select the <i>text</i> elements that will be used for the change detection ‐ after the <i>Browser Steps</i> has completed.<br> | ||||
|                                 This tool is a helper to manage filters in the  "CSS/JSONPath/JQ/XPath Filters" box of the <a href="#filters-and-triggers">Filters & Triggers</a> tab. | ||||
|                             <span class="pure-form-message-inline" id="visual-selector-heading"> | ||||
|                                 The Visual Selector tool lets you select the <i>text</i> elements that will be used for the change detection. It automatically fills in the filters in the "CSS/JSONPath/JQ/XPath Filters" box of the <a href="#filters-and-triggers">Filters & Triggers</a> tab. Use <strong>Shift+Click</strong> to select multiple items. | ||||
|                             </span> | ||||
|  | ||||
|                             <div id="selector-header"> | ||||
| @@ -495,6 +506,12 @@ Unavailable") }} | ||||
|                         </tr> | ||||
|                         </tbody> | ||||
|                     </table> | ||||
|                     {% if watch.history_n %} | ||||
|                         <p> | ||||
|                              <a href="{{url_for('watch_get_latest_html', uuid=uuid)}}" class="pure-button button-small">Download latest HTML snapshot</a> | ||||
|                         </p> | ||||
|                     {% endif %} | ||||
|  | ||||
|                 </div> | ||||
|             </div> | ||||
|             <div id="actions"> | ||||
|   | ||||
| @@ -76,7 +76,7 @@ | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_checkbox_field(form.application.form.empty_pages_are_a_change) }} | ||||
|                         <span class="pure-form-message-inline">When a page contains HTML, but no renderable text appears (empty page), is this considered a change?</span> | ||||
|                         <span class="pure-form-message-inline">When a request returns no content, or the HTML does not contain any text, is this considered a change?</span> | ||||
|                     </div> | ||||
|                 {% if form.requests.proxy %} | ||||
|                     <div class="pure-control-group inline-radio"> | ||||
| @@ -92,7 +92,7 @@ | ||||
|             <div class="tab-pane-inner" id="notifications"> | ||||
|                 <fieldset> | ||||
|                     <div class="field-group"> | ||||
|                         {{ render_common_settings_form(form.application.form, emailprefix, settings_application) }} | ||||
|                         {{ render_common_settings_form(form.application.form, emailprefix, settings_application, extra_notification_token_placeholder_info) }} | ||||
|                     </div> | ||||
|                 </fieldset> | ||||
|                 <div class="pure-control-group" id="notification-base-url"> | ||||
| @@ -155,11 +155,13 @@ | ||||
|                       {{ render_field(form.application.form.global_subtractive_selectors, rows=5, placeholder="header | ||||
| footer | ||||
| nav | ||||
| .stockticker") }} | ||||
| .stockticker | ||||
| //*[contains(text(), 'Advertisement')]") }} | ||||
|                       <span class="pure-form-message-inline"> | ||||
|                         <ul> | ||||
|                           <li> Remove HTML element(s) by CSS selector before text conversion. </li> | ||||
|                           <li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li> | ||||
|                           <li> Remove HTML element(s) by CSS and XPath selectors before text conversion. </li> | ||||
|                           <li> Don't paste HTML here, use only CSS and XPath selectors </li> | ||||
|                           <li> Add multiple elements, CSS or XPath selectors per line to ignore multiple parts of the HTML. </li> | ||||
|                         </ul> | ||||
|                       </span> | ||||
|                     </fieldset> | ||||
|   | ||||
| @@ -59,6 +59,11 @@ | ||||
|     {% set sort_order = sort_order or 'asc' %} | ||||
|     {% set sort_attribute = sort_attribute or 'last_changed'  %} | ||||
|     {% set pagination_page = request.args.get('page', 0) %} | ||||
|     {% set cols_required = 6 %} | ||||
|     {% set any_has_restock_price_processor = datastore.any_watches_have_processor_by_name("restock_diff") %} | ||||
|     {% if any_has_restock_price_processor %} | ||||
|         {% set cols_required = cols_required + 1 %} | ||||
|     {% endif %} | ||||
|  | ||||
|     <div id="watch-table-wrapper"> | ||||
|  | ||||
| @@ -68,17 +73,20 @@ | ||||
|                 {% set link_order = "desc" if sort_order  == 'asc' else "asc" %} | ||||
|                 {% set arrow_span = "" %} | ||||
|                 <th><input style="vertical-align: middle" type="checkbox" id="check-all" > <a class="{{ 'active '+link_order if sort_attribute == 'date_created' else 'inactive' }}"  href="{{url_for('index', sort='date_created', order=link_order, tag=active_tag_uuid)}}"># <span class='arrow {{link_order}}'></span></a></th> | ||||
|                 <th></th> | ||||
|                 <th class="empty-cell"></th> | ||||
|                 <th><a class="{{ 'active '+link_order if sort_attribute == 'label' else 'inactive' }}" href="{{url_for('index', sort='label', order=link_order, tag=active_tag_uuid)}}">Website <span class='arrow {{link_order}}'></span></a></th> | ||||
|              {% if any_has_restock_price_processor %} | ||||
|                 <th>Restock & Price</th> | ||||
|              {% endif %} | ||||
|                 <th><a class="{{ 'active '+link_order if sort_attribute == 'last_checked' else 'inactive' }}" href="{{url_for('index', sort='last_checked', order=link_order, tag=active_tag_uuid)}}">Last Checked <span class='arrow {{link_order}}'></span></a></th> | ||||
|                 <th><a class="{{ 'active '+link_order if sort_attribute == 'last_changed' else 'inactive' }}" href="{{url_for('index', sort='last_changed', order=link_order, tag=active_tag_uuid)}}">Last Changed <span class='arrow {{link_order}}'></span></a></th> | ||||
|                 <th></th> | ||||
|                 <th class="empty-cell"></th> | ||||
|             </tr> | ||||
|             </thead> | ||||
|             <tbody> | ||||
|             {% if not watches|length %} | ||||
|             <tr> | ||||
|                 <td colspan="6" style="text-wrap: wrap;">No website watches configured, please add a URL in the box above, or <a href="{{ url_for('import_page')}}" >import a list</a>.</td> | ||||
|                 <td colspan="{{ cols_required }}" style="text-wrap: wrap;">No website watches configured, please add a URL in the box above, or <a href="{{ url_for('import_page')}}" >import a list</a>.</td> | ||||
|             </tr> | ||||
|             {% endif %} | ||||
|             {% for watch in (watches|sort(attribute=sort_attribute, reverse=sort_order == 'asc'))|pagination_slice(skip=pagination.skip) %} | ||||
| @@ -91,6 +99,7 @@ | ||||
|                 {% if watch.last_notification_error is defined and watch.last_notification_error != False %}error{% endif %} | ||||
|                 {% if watch.paused is defined and watch.paused != False %}paused{% endif %} | ||||
|                 {% if is_unviewed %}unviewed{% endif %} | ||||
|                 {% if watch.has_restock_info %} has-restock-info {% if watch['restock']['in_stock'] %}in-stock{% else %}not-in-stock{% endif %} {% else %}no-restock-info{% endif %} | ||||
|                 {% if watch.uuid in queued_uuids %}queued{% endif %}"> | ||||
|                 <td class="inline checkbox-uuid" ><input name="uuids"  type="checkbox" value="{{ watch.uuid}} " > <span>{{ loop.index+pagination.skip }}</span></td> | ||||
|                 <td class="inline watch-controls"> | ||||
| @@ -135,30 +144,39 @@ | ||||
|  | ||||
|                     {% if watch['processor'] == 'text_json_diff'  %} | ||||
|                         {% if watch['has_ldjson_price_data'] and not watch['track_ldjson_price_data']  %} | ||||
|                         <div class="ldjson-price-track-offer">Embedded price data detected, follow only price data? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div> | ||||
|                         <div class="ldjson-price-track-offer">Switch to Restock & Price watch mode? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div> | ||||
|                         {% endif %} | ||||
|                         {% if watch['track_ldjson_price_data'] == 'accepted' %} | ||||
|                     {% endif %} | ||||
|                     {% if watch['processor'] == 'restock_diff' %} | ||||
|                         <span class="tracking-ldjson-price-data" title="Automatically following embedded price information"><img src="{{url_for('static_content', group='images', filename='price-tag-icon.svg')}}"  class="status-icon price-follow-tag-icon" > Price</span> | ||||
|                         {% endif %} | ||||
|                     {% endif %} | ||||
|  | ||||
|                     {% if watch['processor'] == 'restock_diff'  %} | ||||
|                     <span class="restock-label {{'in-stock' if watch['in_stock'] else 'not-in-stock' }}" title="detecting restock conditions"> | ||||
|                         <!-- maybe some object watch['processor'][restock_diff] or.. --> | ||||
|                         {% if watch['last_checked'] and watch['in_stock'] != None %} | ||||
|                             {% if watch['in_stock'] %} In stock {% else %} Not in stock {% endif %} | ||||
|                         {% else %} | ||||
|                             Not yet checked | ||||
|                         {% endif %} | ||||
|                     </span> | ||||
|                     {% endif %} | ||||
|  | ||||
|  | ||||
|                     {% for watch_tag_uuid, watch_tag in datastore.get_all_tags_for_watch(watch['uuid']).items() %} | ||||
|                       <span class="watch-tag-list">{{ watch_tag.title }}</span> | ||||
|                     {% endfor %} | ||||
|  | ||||
|                 </td> | ||||
|             <!-- @todo make it so any watch handler obj can expose this --> | ||||
| {% if any_has_restock_price_processor %} | ||||
|                 <td class="restock-and-price"> | ||||
|                     {% if watch['processor'] == 'restock_diff'  %} | ||||
|                         {% if watch.has_restock_info %} | ||||
|                             <span class="restock-label {{'in-stock' if watch['restock']['in_stock'] else 'not-in-stock' }}" title="Detecting restock and price"> | ||||
|                                 <!-- maybe some object watch['processor'][restock_diff] or.. --> | ||||
|                                  {% if watch['restock']['in_stock'] %} In stock {% else %} Not in stock {% endif %} | ||||
|                             </span> | ||||
|                         {% endif %} | ||||
|  | ||||
|                         {% if watch.get('restock') and watch['restock']['price'] != None %} | ||||
|                             {% if watch['restock']['price'] != None %} | ||||
|                                 <span class="restock-label price" title="Price"> | ||||
|                                 {{ watch['restock']['price']|format_number_locale }} {{ watch['restock']['currency'] }} | ||||
|                                 </span> | ||||
|                             {% endif %} | ||||
|                         {% elif not watch.has_restock_info %} | ||||
|                             <span class="restock-label error">No information</span> | ||||
|                         {% endif %} | ||||
|                     {% endif %} | ||||
|                 </td> | ||||
| {% endif %} | ||||
|                 <td class="last-checked" data-timestamp="{{ watch.last_checked }}">{{watch|format_last_checked_time|safe}}</td> | ||||
|                 <td class="last-changed" data-timestamp="{{ watch.last_changed }}">{% if watch.history_n >=2 and watch.last_changed >0 %} | ||||
|                     {{watch.last_changed|format_timestamp_timeago}} | ||||
|   | ||||
| @@ -1,4 +1,7 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
| import resource | ||||
| import time | ||||
| from threading import Thread | ||||
|  | ||||
| import pytest | ||||
| from changedetectionio import changedetection_app | ||||
| @@ -23,6 +26,36 @@ def reportlog(pytestconfig): | ||||
|     yield | ||||
|     logger.remove(handler_id) | ||||
|  | ||||
|  | ||||
| def track_memory(memory_usage): | ||||
|     while not memory_usage["stop"]: | ||||
|         max_rss = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss | ||||
|         memory_usage["peak"] = max(memory_usage["peak"], max_rss) | ||||
|         time.sleep(0.01)  # Adjust the sleep time as needed | ||||
|  | ||||
| @pytest.fixture(scope='function') | ||||
| def measure_memory_usage(request): | ||||
|     memory_usage = {"peak": 0, "stop": False} | ||||
|     tracker_thread = Thread(target=track_memory, args=(memory_usage,)) | ||||
|     tracker_thread.start() | ||||
|  | ||||
|     yield | ||||
|  | ||||
|     memory_usage["stop"] = True | ||||
|     tracker_thread.join() | ||||
|  | ||||
|     # Note: ru_maxrss is reported in kilobytes on Linux, but in bytes on macOS | ||||
|     max_memory_used = memory_usage["peak"] / 1024  # Convert to MB | ||||
|     s = f"Peak memory used by the test {request.node.fspath} - '{request.node.name}': {max_memory_used:.2f} MB" | ||||
|     logger.debug(s) | ||||
|  | ||||
|     with open("test-memory.log", 'a') as f: | ||||
|         f.write(f"{s}\n") | ||||
|  | ||||
|     # Optionally assert an upper bound on peak memory usage (currently disabled) | ||||
| #    assert max_memory_used < 150, f"Memory usage exceeded 150MB: {max_memory_used:.2f} MB" | ||||
|  | ||||
|  | ||||
| def cleanup(datastore_path): | ||||
|     import glob | ||||
|     # Unlink test output files | ||||
|   | ||||
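A caveat with the measure_memory_usage fixture added above: resource.getrusage reports ru_maxrss in kilobytes on Linux but in bytes on macOS, so dividing by 1024 only yields megabytes on Linux. If cross-platform numbers ever matter, a small normalising helper (illustrative, not part of this diff) could be used instead:

```python
import resource
import sys

def peak_rss_mb():
    # Peak resident set size of the current process in MB, normalised per platform
    max_rss = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    if sys.platform == "darwin":      # macOS reports bytes
        return max_rss / (1024 * 1024)
    return max_rss / 1024             # Linux (and most BSDs) report kilobytes
```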
| @@ -1,4 +1,4 @@ | ||||
| # !/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
| import os | ||||
|  | ||||
| from flask import url_for | ||||
| @@ -77,13 +77,13 @@ def do_test(client, live_server, make_test_use_extra_browser=False): | ||||
|  | ||||
|  | ||||
| # Requires playwright to be installed | ||||
| def test_request_via_custom_browser_url(client, live_server): | ||||
| def test_request_via_custom_browser_url(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|     # We do this so we can grep the logs of the custom container and see if the request actually went through that container | ||||
|     do_test(client, live_server, make_test_use_extra_browser=True) | ||||
|  | ||||
|  | ||||
| def test_request_not_via_custom_browser_url(client, live_server): | ||||
| def test_request_not_via_custom_browser_url(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|     # We do this so we can grep the logs of the custom container and see if the request actually went through that container | ||||
|     do_test(client, live_server, make_test_use_extra_browser=False) | ||||
|   | ||||
| @@ -1,3 +1,3 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| from .. import conftest | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| @@ -6,7 +6,7 @@ from ..util import live_server_setup, wait_for_all_checks | ||||
| import logging | ||||
|  | ||||
| # Requires playwright to be installed | ||||
| def test_fetch_webdriver_content(client, live_server): | ||||
| def test_fetch_webdriver_content(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
|     ##################### | ||||
|   | ||||
| @@ -3,7 +3,7 @@ from flask import url_for | ||||
| from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client | ||||
|  | ||||
|  | ||||
| def test_execute_custom_js(client, live_server): | ||||
| def test_execute_custom_js(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     live_server_setup(live_server) | ||||
|     assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test" | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| from .. import conftest | ||||
|  | ||||
|   | ||||
| @@ -1,11 +1,11 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import os | ||||
| from flask import url_for | ||||
| from ..util import live_server_setup, wait_for_all_checks | ||||
|  | ||||
|  | ||||
| def test_preferred_proxy(client, live_server): | ||||
| def test_preferred_proxy(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|     url = "http://chosen.changedetection.io" | ||||
|  | ||||
|   | ||||
| @@ -1,11 +1,11 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client | ||||
|  | ||||
|  | ||||
| def test_noproxy_option(client, live_server): | ||||
| def test_noproxy_option(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|     # Run by run_proxy_tests.sh | ||||
|     # Call this URL then scan the containers that it never went through them | ||||
|   | ||||
| @@ -1,11 +1,11 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client | ||||
|  | ||||
| # just make a request, we will grep in the docker logs to see it actually got called | ||||
| def test_check_basic_change_detection_functionality(client, live_server): | ||||
| def test_check_basic_change_detection_functionality(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|     res = client.post( | ||||
|         url_for("import_page"), | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| @@ -6,7 +6,7 @@ from ..util import live_server_setup, wait_for_all_checks | ||||
| import os | ||||
|  | ||||
| # just make a request, we will grep in the docker logs to see it actually got called | ||||
| def test_select_custom(client, live_server): | ||||
| def test_select_custom(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
|     # Goto settings, add our custom one | ||||
|   | ||||
| @@ -1,12 +1,27 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
| import os | ||||
| import time | ||||
| from flask import url_for | ||||
| from changedetectionio.tests.util import live_server_setup, wait_for_all_checks | ||||
|  | ||||
|  | ||||
| def test_socks5(client, live_server): | ||||
| def set_response(): | ||||
|     data = """<html> | ||||
|        <body> | ||||
|      <h1>Awesome, you made it</h1> | ||||
|      yeah the socks request worked | ||||
|      </body> | ||||
|      </html> | ||||
|     """ | ||||
|  | ||||
|     with open("test-datastore/endpoint-content.txt", "w") as f: | ||||
|         f.write(data) | ||||
|     time.sleep(1) | ||||
|  | ||||
|  | ||||
| def test_socks5(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|     set_response() | ||||
|  | ||||
|     # Setup a proxy | ||||
|     res = client.post( | ||||
| @@ -24,7 +39,10 @@ def test_socks5(client, live_server): | ||||
|  | ||||
|     assert b"Settings updated." in res.data | ||||
|  | ||||
|     test_url = "https://changedetection.io/CHANGELOG.txt?socks-test-tag=" + os.getenv('SOCKSTEST', '') | ||||
|     # Because the socks server should connect back to us | ||||
|     test_url = url_for('test_endpoint', _external=True) + f"?socks-test-tag={os.getenv('SOCKSTEST', '')}" | ||||
|     test_url = test_url.replace('localhost.localdomain', 'cdio') | ||||
|     test_url = test_url.replace('localhost', 'cdio') | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("form_quick_watch_add"), | ||||
| @@ -60,4 +78,4 @@ def test_socks5(client, live_server): | ||||
|     ) | ||||
|  | ||||
|     # Should see the proper string | ||||
|     assert "+0200:".encode('utf-8') in res.data | ||||
|     assert "Awesome, you made it".encode('utf-8') in res.data | ||||
|   | ||||
| @@ -1,16 +1,32 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
| import os | ||||
| import time | ||||
| from flask import url_for | ||||
| from changedetectionio.tests.util import live_server_setup, wait_for_all_checks | ||||
|  | ||||
|  | ||||
| def set_response(): | ||||
|     data = """<html> | ||||
|        <body> | ||||
|      <h1>Awesome, you made it</h1> | ||||
|      yeah the socks request worked | ||||
|      </body> | ||||
|      </html> | ||||
|     """ | ||||
|  | ||||
|     with open("test-datastore/endpoint-content.txt", "w") as f: | ||||
|         f.write(data) | ||||
|     time.sleep(1) | ||||
|  | ||||
| # should be proxies.json mounted from run_proxy_tests.sh already | ||||
| # -v `pwd`/tests/proxy_socks5/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json | ||||
| def test_socks5_from_proxiesjson_file(client, live_server): | ||||
| def test_socks5_from_proxiesjson_file(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
|     test_url = "https://changedetection.io/CHANGELOG.txt?socks-test-tag=" + os.getenv('SOCKSTEST', '') | ||||
|     set_response() | ||||
|     # Because the socks server should connect back to us | ||||
|     test_url = url_for('test_endpoint', _external=True) + f"?socks-test-tag={os.getenv('SOCKSTEST', '')}" | ||||
|     test_url = test_url.replace('localhost.localdomain', 'cdio') | ||||
|     test_url = test_url.replace('localhost', 'cdio') | ||||
|  | ||||
|     res = client.get(url_for("settings_page")) | ||||
|     assert b'name="requests-proxy" type="radio" value="socks5proxy"' in res.data | ||||
| @@ -49,4 +65,4 @@ def test_socks5_from_proxiesjson_file(client, live_server): | ||||
|     ) | ||||
|  | ||||
|     # Should see the proper string | ||||
|     assert "+0200:".encode('utf-8') in res.data | ||||
|     assert "Awesome, you made it".encode('utf-8') in res.data | ||||
|   | ||||
| @@ -1,3 +1,3 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| from .. import conftest | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
| import os | ||||
| import time | ||||
| from flask import url_for | ||||
| from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client | ||||
| from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client, wait_for_notification_endpoint_output | ||||
| from changedetectionio.notification import ( | ||||
|     default_notification_body, | ||||
|     default_notification_format, | ||||
| @@ -48,7 +48,7 @@ def set_back_in_stock_response(): | ||||
|     return None | ||||
|  | ||||
| # Test that restock detection works end-to-end, including its notification | ||||
| def test_restock_detection(client, live_server): | ||||
| def test_restock_detection(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     set_original_response() | ||||
|     #assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test" | ||||
| @@ -94,7 +94,7 @@ def test_restock_detection(client, live_server): | ||||
|     assert b'not-in-stock' not in res.data | ||||
|  | ||||
|     # We should have a notification | ||||
|     time.sleep(2) | ||||
|     wait_for_notification_endpoint_output() | ||||
|     assert os.path.isfile("test-datastore/notification.txt"), "Notification received" | ||||
|     os.unlink("test-datastore/notification.txt") | ||||
|  | ||||
| @@ -103,6 +103,7 @@ def test_restock_detection(client, live_server): | ||||
|     set_original_response() | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     wait_for_all_checks(client) | ||||
|     time.sleep(5) | ||||
|     assert not os.path.isfile("test-datastore/notification.txt"), "No notification should have fired when it went OUT OF STOCK by default" | ||||
|  | ||||
|     # BUT we should see that it correctly shows "not in stock" | ||||
|   | ||||
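Several tests in this diff swap fixed time.sleep() calls for wait_for_notification_endpoint_output(), imported from the shared tests util module. Its implementation is not shown in this compare view; a helper by that name would plausibly just poll for the notification output file with a timeout, roughly like the sketch below (the defaults are assumptions, though the file path appears in the tests above).

```python
import os
import time

def wait_for_notification_endpoint_output(path="test-datastore/notification.txt",
                                           timeout=10, poll=0.2):
    # Block until the test notification endpoint has written its output file, or give up
    deadline = time.time() + timeout
    while time.time() < deadline:
        if os.path.isfile(path) and os.path.getsize(path) > 0:
            return True
        time.sleep(poll)
    return False
```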
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
| import asyncio | ||||
| from aiosmtpd.controller import Controller | ||||
| from aiosmtpd.smtp import SMTP | ||||
|   | ||||
| @@ -40,7 +40,7 @@ def get_last_message_from_smtp_server(): | ||||
|  | ||||
| # Requires running the test SMTP server | ||||
|  | ||||
| def test_check_notification_email_formats_default_HTML(client, live_server): | ||||
| def test_check_notification_email_formats_default_HTML(client, live_server, measure_memory_usage): | ||||
|     # live_server_setup(live_server) | ||||
|     set_original_response() | ||||
|  | ||||
| @@ -92,7 +92,7 @@ def test_check_notification_email_formats_default_HTML(client, live_server): | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
|  | ||||
| def test_check_notification_email_formats_default_Text_override_HTML(client, live_server): | ||||
| def test_check_notification_email_formats_default_Text_override_HTML(client, live_server, measure_memory_usage): | ||||
|     # live_server_setup(live_server) | ||||
|  | ||||
|     # HTML problems? see this | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
| import os.path | ||||
| import time | ||||
| from flask import url_for | ||||
| from .util import live_server_setup, wait_for_all_checks | ||||
| from .util import live_server_setup, wait_for_all_checks, wait_for_notification_endpoint_output | ||||
| from changedetectionio import html_tools | ||||
|  | ||||
|  | ||||
| @@ -35,10 +35,10 @@ def set_original(excluding=None, add_line=None): | ||||
|     with open("test-datastore/endpoint-content.txt", "w") as f: | ||||
|         f.write(test_return_data) | ||||
|  | ||||
| def test_setup(client, live_server): | ||||
| def test_setup(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
| def test_check_removed_line_contains_trigger(client, live_server): | ||||
| def test_check_removed_line_contains_trigger(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     # Give the endpoint time to spin up | ||||
|     time.sleep(1) | ||||
| @@ -103,7 +103,7 @@ def test_check_removed_line_contains_trigger(client, live_server): | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
|  | ||||
| def test_check_add_line_contains_trigger(client, live_server): | ||||
| def test_check_add_line_contains_trigger(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|  | ||||
|     # Give the endpoint time to spin up | ||||
| @@ -112,7 +112,7 @@ def test_check_add_line_contains_trigger(client, live_server): | ||||
|     res = client.post( | ||||
|         url_for("settings_page"), | ||||
|         data={"application-notification_title": "New ChangeDetection.io Notification - {{ watch_url }}", | ||||
|               "application-notification_body": 'triggered text was -{{triggered_text}}-', | ||||
|               "application-notification_body": 'triggered text was -{{triggered_text}}- 网站监测 内容更新了', | ||||
|               # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation | ||||
|               "application-notification_urls": test_notification_url, | ||||
|               "application-minutes_between_check": 180, | ||||
| @@ -140,6 +140,7 @@ def test_check_add_line_contains_trigger(client, live_server): | ||||
|         url_for("edit_page", uuid="first"), | ||||
|         data={"trigger_text": 'Oh yes please', | ||||
|               "url": test_url, | ||||
|               'processor': 'text_json_diff', | ||||
|               'fetch_backend': "html_requests", | ||||
|               'filter_text_removed': '', | ||||
|               'filter_text_added': 'y'}, | ||||
| @@ -164,11 +165,12 @@ def test_check_add_line_contains_trigger(client, live_server): | ||||
|     assert b'unviewed' in res.data | ||||
|  | ||||
|     # Takes a moment for apprise to fire | ||||
|     time.sleep(3) | ||||
|     wait_for_notification_endpoint_output() | ||||
|     assert os.path.isfile("test-datastore/notification.txt"), "Notification fired because I can see the output file" | ||||
|     with open("test-datastore/notification.txt", 'r') as f: | ||||
|         response= f.read() | ||||
|         assert '-Oh yes please-' in response | ||||
|     with open("test-datastore/notification.txt", 'rb') as f: | ||||
|         response = f.read() | ||||
|         assert b'-Oh yes please-' in response | ||||
|         assert '网站监测 内容更新了'.encode('utf-8') in response | ||||
|  | ||||
|  | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| @@ -53,10 +53,10 @@ def is_valid_uuid(val): | ||||
|         return False | ||||
|  | ||||
|  | ||||
| def test_setup(client, live_server): | ||||
| def test_setup(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
| def test_api_simple(client, live_server): | ||||
| def test_api_simple(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|  | ||||
|     api_key = extract_api_key_from_UI(client) | ||||
| @@ -149,6 +149,15 @@ def test_api_simple(client, live_server): | ||||
|         headers={'x-api-key': api_key}, | ||||
|     ) | ||||
|     assert b'which has this one new line' in res.data | ||||
|     assert b'<div id' not in res.data | ||||
|  | ||||
|     # Fetch the HTML of the latest one | ||||
|     res = client.get( | ||||
|         url_for("watchsinglehistory", uuid=watch_uuid, timestamp='latest')+"?html=1", | ||||
|         headers={'x-api-key': api_key}, | ||||
|     ) | ||||
|     assert b'which has this one new line' in res.data | ||||
|     assert b'<div id' in res.data | ||||
|  | ||||
|     # Fetch the whole watch | ||||
|     res = client.get( | ||||
| @@ -232,7 +241,7 @@ def test_api_simple(client, live_server): | ||||
|     ) | ||||
|     assert len(res.json) == 0, "Watch list should be empty" | ||||
|  | ||||
| def test_access_denied(client, live_server): | ||||
| def test_access_denied(client, live_server, measure_memory_usage): | ||||
|     # `config_api_token_enabled` Should be On by default | ||||
|     res = client.get( | ||||
|         url_for("createwatch") | ||||
| @@ -278,7 +287,7 @@ def test_access_denied(client, live_server): | ||||
|     ) | ||||
|     assert b"Settings updated." in res.data | ||||
|  | ||||
| def test_api_watch_PUT_update(client, live_server): | ||||
| def test_api_watch_PUT_update(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     #live_server_setup(live_server) | ||||
|     api_key = extract_api_key_from_UI(client) | ||||
| @@ -360,7 +369,7 @@ def test_api_watch_PUT_update(client, live_server): | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
|  | ||||
| def test_api_import(client, live_server): | ||||
| def test_api_import(client, live_server, measure_memory_usage): | ||||
|     api_key = extract_api_key_from_UI(client) | ||||
|  | ||||
|     res = client.post( | ||||
|   | ||||
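The new assertions in the API test above fetch the latest snapshot once as extracted text and once with ?html=1 to get the raw HTML back. Outside the Flask test client, the same calls would look roughly like this with requests; the /api/v1/... path and the default port are assumptions based on changedetection.io's usual API layout, while the x-api-key header comes straight from the test.

```python
import requests

BASE_URL = "http://localhost:5000"          # assumed local instance
API_KEY = "your-api-key"                    # from Settings > API
WATCH_UUID = "00000000-0000-0000-0000-000000000000"   # placeholder watch UUID

url = f"{BASE_URL}/api/v1/watch/{WATCH_UUID}/history/latest"
headers = {"x-api-key": API_KEY}

text_snapshot = requests.get(url, headers=headers)                       # extracted text (default)
html_snapshot = requests.get(url, params={"html": 1}, headers=headers)   # raw HTML snapshot

print(text_snapshot.status_code, len(text_snapshot.text))
print(html_snapshot.status_code, html_snapshot.text[:80])
```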
| @@ -1,11 +1,11 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| from .util import live_server_setup, wait_for_all_checks | ||||
|  | ||||
|  | ||||
| def test_basic_auth(client, live_server): | ||||
| def test_basic_auth(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| @@ -76,12 +76,12 @@ def set_response_without_ldjson(): | ||||
|         f.write(test_return_data) | ||||
|     return None | ||||
|  | ||||
| def test_setup(client, live_server): | ||||
| def test_setup(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
| # actually only really used by the distill.io importer, but could be handy too | ||||
| def test_check_ldjson_price_autodetect(client, live_server): | ||||
|  | ||||
| def test_check_ldjson_price_autodetect(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|     set_response_with_ldjson() | ||||
|  | ||||
|     # Add our URL to the import page | ||||
| @@ -100,12 +100,8 @@ def test_check_ldjson_price_autodetect(client, live_server): | ||||
|  | ||||
|     # Accept it | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     time.sleep(1) | ||||
|     #time.sleep(1) | ||||
|     client.get(url_for('price_data_follower.accept', uuid=uuid, follow_redirects=True)) | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # Trigger a check | ||||
|     time.sleep(1) | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     wait_for_all_checks(client) | ||||
|     # Offer should be gone | ||||
| @@ -120,8 +116,8 @@ def test_check_ldjson_price_autodetect(client, live_server): | ||||
|         headers={'x-api-key': api_key}, | ||||
|     ) | ||||
|  | ||||
|     # Should see this (dont know where the whitespace came from) | ||||
|     assert b'"highPrice": 8099900' in res.data | ||||
|     assert b'8097000' in res.data | ||||
|  | ||||
|     # And not this cause its not the ld-json | ||||
|     assert b"So let's see what happens" not in res.data | ||||
|  | ||||
| @@ -160,14 +156,14 @@ def _test_runner_check_bad_format_ignored(live_server, client, has_ldjson_price_ | ||||
|  | ||||
|     for k,v in client.application.config.get('DATASTORE').data['watching'].items(): | ||||
|         assert v.get('last_error') == False | ||||
|         assert v.get('has_ldjson_price_data') == has_ldjson_price_data | ||||
|         assert v.get('has_ldjson_price_data') == has_ldjson_price_data, f"Detected LDJSON data? should be {has_ldjson_price_data}" | ||||
|  | ||||
|  | ||||
|     ########################################################################################## | ||||
|     client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|  | ||||
|  | ||||
| def test_bad_ldjson_is_correctly_ignored(client, live_server): | ||||
| def test_bad_ldjson_is_correctly_ignored(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|     test_return_data = """ | ||||
|             <html> | ||||
| @@ -201,35 +197,37 @@ def test_bad_ldjson_is_correctly_ignored(client, live_server): | ||||
|         f.write(test_return_data) | ||||
|  | ||||
|     _test_runner_check_bad_format_ignored(live_server=live_server, client=client, has_ldjson_price_data=True) | ||||
|     test_return_data = """ | ||||
|             <html> | ||||
|             <head> | ||||
|                 <script type="application/ld+json"> | ||||
|                     { | ||||
|                         "@context": "http://schema.org", | ||||
|                         "@type": ["Product", "SubType"], | ||||
|                         "name": "My test product", | ||||
|                         "description": "", | ||||
|                         "BrokenOffers": { | ||||
|                             "@type": "Offer", | ||||
|                             "offeredBy": { | ||||
|                                 "@type": "Organization", | ||||
|                                 "name":"Person", | ||||
|                                 "telephone":"+1 999 999 999" | ||||
|                             }, | ||||
|                             "price": "1", | ||||
|                             "priceCurrency": "EUR", | ||||
|                             "url": "/some/url" | ||||
|                         } | ||||
|                     } | ||||
|                 </script> | ||||
|             </head> | ||||
|             <body> | ||||
|             <div class="yes">Some extra stuff</div> | ||||
|             </body></html> | ||||
|      """ | ||||
|     with open("test-datastore/endpoint-content.txt", "w") as f: | ||||
|         f.write(test_return_data) | ||||
|  | ||||
|     _test_runner_check_bad_format_ignored(live_server=live_server, client=client, has_ldjson_price_data=False) | ||||
|     # It's OK that it offers a suggestion in this case; the processor will let the user know more about what's wrong | ||||
|  | ||||
|     # test_return_data = """ | ||||
|     #         <html> | ||||
|     #         <head> | ||||
|     #             <script type="application/ld+json"> | ||||
|     #                 { | ||||
|     #                     "@context": "http://schema.org", | ||||
|     #                     "@type": ["Product", "SubType"], | ||||
|     #                     "name": "My test product", | ||||
|     #                     "description": "", | ||||
|     #                     "BrokenOffers": { | ||||
|     #                         "@type": "Offer", | ||||
|     #                         "offeredBy": { | ||||
|     #                             "@type": "Organization", | ||||
|     #                             "name":"Person", | ||||
|     #                             "telephone":"+1 999 999 999" | ||||
|     #                         }, | ||||
|     #                         "price": "1", | ||||
|     #                         "priceCurrency": "EUR", | ||||
|     #                         "url": "/some/url" | ||||
|     #                     } | ||||
|     #                 } | ||||
|     #             </script> | ||||
|     #         </head> | ||||
|     #         <body> | ||||
|     #         <div class="yes">Some extra stuff</div> | ||||
|     #         </body></html> | ||||
|     #  """ | ||||
|     # with open("test-datastore/endpoint-content.txt", "w") as f: | ||||
|     #     f.write(test_return_data) | ||||
|     # | ||||
|     # _test_runner_check_bad_format_ignored(live_server=live_server, client=client, has_ldjson_price_data=False) | ||||
|   | ||||
| @@ -1,9 +1,10 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| from urllib.request import urlopen | ||||
| from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI | ||||
| from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, \ | ||||
|     extract_UUID_from_client | ||||
|  | ||||
| sleep_time_for_fetch_thread = 3 | ||||
|  | ||||
| @@ -16,7 +17,7 @@ def test_inscriptus(): | ||||
|     assert stripped_text_from_html == 'test!\nok man' | ||||
|  | ||||
|  | ||||
| def test_check_basic_change_detection_functionality(client, live_server): | ||||
| def test_check_basic_change_detection_functionality(client, live_server, measure_memory_usage): | ||||
|     set_original_response() | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
| @@ -68,6 +69,12 @@ def test_check_basic_change_detection_functionality(client, live_server): | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|  | ||||
|     # Check the 'get latest snapshot works' | ||||
|     res = client.get(url_for("watch_get_latest_html", uuid=uuid)) | ||||
|     assert b'which has this one new line' in res.data | ||||
|  | ||||
|     # Now something should be ready, indicated by having a 'unviewed' class | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b'unviewed' in res.data | ||||
| @@ -85,7 +92,7 @@ def test_check_basic_change_detection_functionality(client, live_server): | ||||
|     assert expected_url.encode('utf-8') in res.data | ||||
|  | ||||
|     # Following the 'diff' link, it should no longer display as 'unviewed' even after we recheck it a few times | ||||
|     res = client.get(url_for("diff_history_page", uuid="first")) | ||||
|     res = client.get(url_for("diff_history_page", uuid=uuid)) | ||||
|     assert b'selected=""' in res.data, "Confirm diff history page loaded" | ||||
|  | ||||
|     # Check the [preview] pulls the right one | ||||
| @@ -141,6 +148,13 @@ def test_check_basic_change_detection_functionality(client, live_server): | ||||
|     assert b'Mark all viewed' not in res.data | ||||
|     assert b'unviewed' not in res.data | ||||
|  | ||||
|     # #2458 "clear history" should make the Watch object update its status correctly when the first snapshot lands again | ||||
|     client.get(url_for("clear_watch_history", uuid=uuid)) | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     wait_for_all_checks(client) | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b'preview/' in res.data | ||||
|  | ||||
|     # | ||||
|     # Cleanup everything | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| from .util import set_original_response, live_server_setup, wait_for_all_checks | ||||
| from flask import url_for | ||||
| @@ -8,7 +8,7 @@ import re | ||||
| import time | ||||
|  | ||||
|  | ||||
| def test_backup(client, live_server): | ||||
| def test_backup(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
|     set_original_response() | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| @@ -60,7 +60,7 @@ def set_modified_response_minus_block_text(): | ||||
|         f.write(test_return_data) | ||||
|  | ||||
|  | ||||
| def test_check_block_changedetection_text_NOT_present(client, live_server): | ||||
| def test_check_block_changedetection_text_NOT_present(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     live_server_setup(live_server) | ||||
|     # Use a mix of case in ZzZ to prove it works case-insensitive. | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| @@ -6,7 +6,7 @@ from . util import live_server_setup | ||||
|  | ||||
|  | ||||
|  | ||||
| def test_trigger_functionality(client, live_server): | ||||
| def test_trigger_functionality(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| @@ -70,7 +70,7 @@ def test_include_filters_output(): | ||||
|  | ||||
|  | ||||
| # Tests the whole stack works with the CSS Filter | ||||
| def test_check_markup_include_filters_restriction(client, live_server): | ||||
| def test_check_markup_include_filters_restriction(client, live_server, measure_memory_usage): | ||||
|     sleep_time_for_fetch_thread = 3 | ||||
|  | ||||
|     include_filters = "#sametext" | ||||
| @@ -124,7 +124,7 @@ def test_check_markup_include_filters_restriction(client, live_server): | ||||
|  | ||||
|  | ||||
| # Tests the whole stack works with the CSS Filter | ||||
| def test_check_multiple_filters(client, live_server): | ||||
| def test_check_multiple_filters(client, live_server, measure_memory_usage): | ||||
|     sleep_time_for_fetch_thread = 3 | ||||
|  | ||||
|     include_filters = "#blob-a\r\nxpath://*[contains(@id,'blob-b')]" | ||||
| @@ -180,7 +180,7 @@ def test_check_multiple_filters(client, live_server): | ||||
| # The filter exists, but did not contain anything useful | ||||
| # Mainly used when the filter contains just an IMG; this can happen when someone selects an image in the visual-selector | ||||
| # Tests that the fetcher can throw a "ReplyWithContentButNoText" exception after applying the filter and extracting text | ||||
| def test_filter_is_empty_help_suggestion(client, live_server): | ||||
| def test_filter_is_empty_help_suggestion(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|  | ||||
|     include_filters = "#blob-a" | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
|  | ||||
| @@ -87,6 +87,9 @@ def test_element_removal_output(): | ||||
|      Some initial text<br> | ||||
|      <p>across multiple lines</p> | ||||
|      <div id="changetext">Some text that changes</div> | ||||
|      <div>Some text should be matched by xPath // selector</div> | ||||
|      <div>Some text should be matched by xPath selector</div> | ||||
|      <div>Some text should be matched by xPath1 selector</div> | ||||
|      </body> | ||||
|     <footer> | ||||
|     <p>Footer</p> | ||||
| @@ -94,7 +97,16 @@ def test_element_removal_output(): | ||||
|      </html> | ||||
|     """ | ||||
|     html_blob = element_removal( | ||||
|         ["header", "footer", "nav", "#changetext"], html_content=content | ||||
|       [ | ||||
|         "header", | ||||
|         "footer", | ||||
|         "nav", | ||||
|         "#changetext", | ||||
|         "//*[contains(text(), 'xPath // selector')]", | ||||
|         "xpath://*[contains(text(), 'xPath selector')]", | ||||
|         "xpath1://*[contains(text(), 'xPath1 selector')]" | ||||
|       ], | ||||
|       html_content=content | ||||
|     ) | ||||
|     text = get_text(html_blob) | ||||
|     assert ( | ||||
| @@ -106,7 +118,7 @@ across multiple lines | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def test_element_removal_full(client, live_server): | ||||
| def test_element_removal_full(client, live_server, measure_memory_usage): | ||||
|     sleep_time_for_fetch_thread = 3 | ||||
|  | ||||
|     set_original_response() | ||||
|   | ||||
| @@ -1,9 +1,9 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
| # coding=utf-8 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| from .util import live_server_setup, wait_for_all_checks | ||||
| from .util import live_server_setup, wait_for_all_checks, extract_UUID_from_client | ||||
| import pytest | ||||
|  | ||||
|  | ||||
| @@ -24,7 +24,7 @@ def set_html_response(): | ||||
|  | ||||
|  | ||||
| # In the case the server does not issue a charset= or doesn't have the content_type header set | ||||
| def test_check_encoding_detection(client, live_server): | ||||
| def test_check_encoding_detection(client, live_server, measure_memory_usage): | ||||
|     set_html_response() | ||||
|  | ||||
|     # Add our URL to the import page | ||||
| @@ -38,6 +38,11 @@ def test_check_encoding_detection(client, live_server): | ||||
|     # Give the thread time to pick it up | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|  | ||||
|     # Content type recording worked | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid]['content-type'] == "text/html" | ||||
|  | ||||
|     res = client.get( | ||||
|         url_for("preview_page", uuid="first"), | ||||
|         follow_redirects=True | ||||
| @@ -50,7 +55,7 @@ def test_check_encoding_detection(client, live_server): | ||||
|  | ||||
|  | ||||
| # In the case the server does not issue a charset= or doesn't have the content_type header set | ||||
| def test_check_encoding_detection_missing_content_type_header(client, live_server): | ||||
| def test_check_encoding_detection_missing_content_type_header(client, live_server, measure_memory_usage): | ||||
|     set_html_response() | ||||
|  | ||||
|     # Add our URL to the import page | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
|  | ||||
| @@ -54,7 +54,7 @@ def _runner_test_http_errors(client, live_server, http_code, expected_text): | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
|  | ||||
| def test_http_error_handler(client, live_server): | ||||
| def test_http_error_handler(client, live_server, measure_memory_usage): | ||||
|     _runner_test_http_errors(client, live_server, 403, 'Access denied') | ||||
|     _runner_test_http_errors(client, live_server, 404, 'Page not found') | ||||
|     _runner_test_http_errors(client, live_server, 500, '(Internal server error) received') | ||||
| @@ -63,7 +63,7 @@ def test_http_error_handler(client, live_server): | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
| # Just to be sure error text is properly handled | ||||
| def test_DNS_errors(client, live_server): | ||||
| def test_DNS_errors(client, live_server, measure_memory_usage): | ||||
|     # Give the endpoint time to spin up | ||||
|     time.sleep(1) | ||||
|  | ||||
| @@ -87,7 +87,7 @@ def test_DNS_errors(client, live_server): | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
| # Re 1513 | ||||
| def test_low_level_errors_clear_correctly(client, live_server): | ||||
| def test_low_level_errors_clear_correctly(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|     # Give the endpoint time to spin up | ||||
|     time.sleep(1) | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| @@ -9,7 +9,7 @@ sleep_time_for_fetch_thread = 3 | ||||
|  | ||||
|  | ||||
|  | ||||
| def test_check_extract_text_from_diff(client, live_server): | ||||
| def test_check_extract_text_from_diff(client, live_server, measure_memory_usage): | ||||
|     import time | ||||
|     with open("test-datastore/endpoint-content.txt", "w") as f: | ||||
|         f.write("Now it's {} seconds since epoch, time flies!".format(str(time.time()))) | ||||
|   | ||||
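`test_check_extract_text_from_diff` feeds pages whose only change is an epoch timestamp and then extracts text from the resulting diff. A self-contained sketch of that idea using `difflib` plus a regex over the added lines (the application's own diff code is not shown in this hunk):

```python
import difflib
import re


def extract_from_added_lines(before, after, pattern):
    """Return regex matches found only in lines added between two snapshots."""
    added = [line[1:] for line in difflib.unified_diff(before.splitlines(), after.splitlines(), lineterm='')
             if line.startswith('+') and not line.startswith('+++')]
    return [m.group(0) for line in added for m in re.finditer(pattern, line)]


# extract_from_added_lines("epoch 111 seconds", "epoch 222 seconds", r"\d+") -> ['222']
```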
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| @@ -67,10 +67,10 @@ def set_multiline_response(): | ||||
|     return None | ||||
|  | ||||
|  | ||||
| def test_setup(client, live_server): | ||||
| def test_setup(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
| def test_check_filter_multiline(client, live_server): | ||||
| def test_check_filter_multiline(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|     set_multiline_response() | ||||
|  | ||||
| @@ -122,7 +122,7 @@ def test_check_filter_multiline(client, live_server): | ||||
|     # but the last one, which also says 'lines', shouldn't be here (non-greedy match checking) | ||||
|     assert b'aaand something lines' not in res.data | ||||
|  | ||||
| def test_check_filter_and_regex_extract(client, live_server): | ||||
| def test_check_filter_and_regex_extract(client, live_server, measure_memory_usage): | ||||
|      | ||||
|     include_filters = ".changetext" | ||||
|  | ||||
| @@ -205,7 +205,7 @@ def test_check_filter_and_regex_extract(client, live_server): | ||||
|  | ||||
|  | ||||
|  | ||||
| def test_regex_error_handling(client, live_server): | ||||
| def test_regex_error_handling(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     #live_server_setup(live_server) | ||||
|  | ||||
|   | ||||
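The multiline-filter test above explicitly checks non-greedy matching (the final "... lines" occurrence must not be captured). A quick standalone illustration of the greedy vs. non-greedy difference, unrelated to the project's own filter code:

```python
import re

text = "something to keep\nsome other lines\naaand something lines"

# Greedy: ".*" runs to the LAST possible "lines" across the whole blob
print(re.search(r"something.*lines", text, re.DOTALL).group())

# Non-greedy: ".*?" stops at the FIRST "lines", so the trailing line is excluded
print(re.search(r"something.*?lines", text, re.DOTALL).group())
```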
| @@ -1,10 +1,10 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| # https://www.reddit.com/r/selfhosted/comments/wa89kp/comment/ii3a4g7/?context=3 | ||||
| import os | ||||
| import time | ||||
| from flask import url_for | ||||
| from .util import set_original_response, live_server_setup | ||||
| from .util import set_original_response, live_server_setup, wait_for_notification_endpoint_output | ||||
| from changedetectionio.model import App | ||||
|  | ||||
|  | ||||
| @@ -41,7 +41,7 @@ def set_response_with_filter(): | ||||
|         f.write(test_return_data) | ||||
|     return None | ||||
|  | ||||
| def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_server): | ||||
| def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_server, measure_memory_usage): | ||||
| #  Filter knowingly doesn't exist, like someone setting up a known filter to see if some cinema tickets are on sale again | ||||
| #  And the page has that filter available | ||||
| #  Then I should get a notification | ||||
| @@ -102,14 +102,15 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|     time.sleep(3) | ||||
|     wait_for_notification_endpoint_output() | ||||
|  | ||||
|     # Shouldn't exist, shouldn't have fired | ||||
|     assert not os.path.isfile("test-datastore/notification.txt") | ||||
|     # Now the filter should exist | ||||
|     set_response_with_filter() | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     time.sleep(3) | ||||
|  | ||||
|     wait_for_notification_endpoint_output() | ||||
|  | ||||
|     assert os.path.isfile("test-datastore/notification.txt") | ||||
|  | ||||
|   | ||||
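These tests swap fixed `time.sleep(3)` calls for a `wait_for_notification_endpoint_output()` helper imported from `tests/util.py`, which is not part of this diff. A minimal sketch of such a polling helper, assuming the test notification endpoint writes to `test-datastore/notification.txt` (the path the assertions use) and that simply returning after a timeout is acceptable:

```python
import os
import time


def wait_for_notification_endpoint_output(path="test-datastore/notification.txt", timeout=30):
    """Poll until the notification output file appears (or the timeout passes) instead of sleeping a fixed time."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if os.path.isfile(path) and os.path.getsize(path) > 0:
            return True
        time.sleep(0.5)
    return False  # caller decides whether a missing file is a failure
```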
| @@ -1,7 +1,9 @@ | ||||
| import os | ||||
| import time | ||||
| from loguru import logger | ||||
| from flask import url_for | ||||
| from .util import set_original_response, live_server_setup, extract_UUID_from_client, wait_for_all_checks | ||||
| from .util import set_original_response, live_server_setup, extract_UUID_from_client, wait_for_all_checks, \ | ||||
|     wait_for_notification_endpoint_output | ||||
| from changedetectionio.model import App | ||||
|  | ||||
|  | ||||
| @@ -21,10 +23,17 @@ def set_response_with_filter(): | ||||
|         f.write(test_return_data) | ||||
|     return None | ||||
|  | ||||
| def run_filter_test(client, content_filter): | ||||
| def run_filter_test(client, live_server, content_filter): | ||||
|  | ||||
|     # Response WITHOUT the filter ID element | ||||
|     set_original_response() | ||||
|  | ||||
|     # Goto the edit page, add our ignore text | ||||
|     notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json') | ||||
|  | ||||
|     # Add our URL to the import page | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
|  | ||||
|     # Give the endpoint time to spin up | ||||
|     time.sleep(1) | ||||
|     # cleanup for the next | ||||
|     client.get( | ||||
|         url_for("form_delete", uuid="all"), | ||||
| @@ -33,81 +42,92 @@ def run_filter_test(client, content_filter): | ||||
|     if os.path.isfile("test-datastore/notification.txt"): | ||||
|         os.unlink("test-datastore/notification.txt") | ||||
|  | ||||
|     # Add our URL to the import page | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
|     res = client.post( | ||||
|         url_for("form_quick_watch_add"), | ||||
|         data={"url": test_url, "tags": ''}, | ||||
|         url_for("import_page"), | ||||
|         data={"urls": test_url}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b"Watch added" in res.data | ||||
|  | ||||
|     # Give the thread time to pick up the first version | ||||
|     assert b"1 Imported" in res.data | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # Goto the edit page, add our ignore text | ||||
|     # Add our URL to the import page | ||||
|     url = url_for('test_notification_endpoint', _external=True) | ||||
|     notification_url = url.replace('http', 'json') | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|  | ||||
|     print(">>>> Notification URL: " + notification_url) | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid]['consecutive_filter_failures'] == 0, "No filter = No filter failure" | ||||
|  | ||||
|     # Just a regular notification setting, this will be used by the special 'filter not found' notification | ||||
|     notification_form_data = {"notification_urls": notification_url, | ||||
|                               "notification_title": "New ChangeDetection.io Notification - {{watch_url}}", | ||||
|                               "notification_body": "BASE URL: {{base_url}}\n" | ||||
|                                                    "Watch URL: {{watch_url}}\n" | ||||
|                                                    "Watch UUID: {{watch_uuid}}\n" | ||||
|                                                    "Watch title: {{watch_title}}\n" | ||||
|                                                    "Watch tag: {{watch_tag}}\n" | ||||
|                                                    "Preview: {{preview_url}}\n" | ||||
|                                                    "Diff URL: {{diff_url}}\n" | ||||
|                                                    "Snapshot: {{current_snapshot}}\n" | ||||
|                                                    "Diff: {{diff}}\n" | ||||
|                                                    "Diff Full: {{diff_full}}\n" | ||||
|                                                    "Diff as Patch: {{diff_patch}}\n" | ||||
|                                                    ":-)", | ||||
|                               "notification_format": "Text"} | ||||
|  | ||||
|     notification_form_data.update({ | ||||
|         "url": test_url, | ||||
|         "tags": "my tag", | ||||
|         "title": "my title 123", | ||||
|         "headers": "", | ||||
|         "filter_failure_notification_send": 'y', | ||||
|         "include_filters": content_filter, | ||||
|         "fetch_backend": "html_requests"}) | ||||
|     watch_data = {"notification_urls": notification_url, | ||||
|                   "notification_title": "New ChangeDetection.io Notification - {{watch_url}}", | ||||
|                   "notification_body": "BASE URL: {{base_url}}\n" | ||||
|                                        "Watch URL: {{watch_url}}\n" | ||||
|                                        "Watch UUID: {{watch_uuid}}\n" | ||||
|                                        "Watch title: {{watch_title}}\n" | ||||
|                                        "Watch tag: {{watch_tag}}\n" | ||||
|                                        "Preview: {{preview_url}}\n" | ||||
|                                        "Diff URL: {{diff_url}}\n" | ||||
|                                        "Snapshot: {{current_snapshot}}\n" | ||||
|                                        "Diff: {{diff}}\n" | ||||
|                                        "Diff Full: {{diff_full}}\n" | ||||
|                                        "Diff as Patch: {{diff_patch}}\n" | ||||
|                                        ":-)", | ||||
|                   "notification_format": "Text", | ||||
|                   "fetch_backend": "html_requests", | ||||
|                   "filter_failure_notification_send": 'y', | ||||
|                   "headers": "", | ||||
|                   "tags": "my tag", | ||||
|                   "title": "my title 123", | ||||
|                   "time_between_check-hours": 5,  # So that the queue runner doesn't also queue it | ||||
|                   "url": test_url, | ||||
|                   } | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid="first"), | ||||
|         data=notification_form_data, | ||||
|         url_for("edit_page", uuid=uuid), | ||||
|         data=watch_data, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b"Updated watch." in res.data | ||||
|     wait_for_all_checks(client) | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid]['consecutive_filter_failures'] == 0, "No filter = No filter failure" | ||||
|  | ||||
|     # Now the notification should not exist, because we didn't reach the threshold | ||||
|     # Now add a filter; because recheck hours == 5, only pressing [edit] or [recheck all] should trigger a check | ||||
|     watch_data['include_filters'] = content_filter | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid=uuid), | ||||
|         data=watch_data, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|  | ||||
|     # It should have checked once so far and given this error (because we hit SAVE) | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|     assert not os.path.isfile("test-datastore/notification.txt") | ||||
|  | ||||
|     # -2 because we would have checked twice above (on adding and on edit) | ||||
|     for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT-2): | ||||
|         res = client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     # Hitting [save] would have triggered a recheck, and we have a filter, so this would be ONE failure | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid]['consecutive_filter_failures'] == 1, "Should have been checked once" | ||||
|  | ||||
|     # recheck it up to just before the threshold, including the fact that in the previous POST it would have rechecked (and incremented) | ||||
|     # Add 4 more checks | ||||
|     checked = 0 | ||||
|     ATTEMPT_THRESHOLD_SETTING = live_server.app.config['DATASTORE'].data['settings']['application'].get('filter_failure_notification_threshold_attempts', 0) | ||||
|     for i in range(0, ATTEMPT_THRESHOLD_SETTING - 2): | ||||
|         checked += 1 | ||||
|         client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|         wait_for_all_checks(client) | ||||
|         assert not os.path.isfile("test-datastore/notification.txt"), f"test-datastore/notification.txt should not exist - Attempt {i}" | ||||
|         res = client.get(url_for("index")) | ||||
|         assert b'Warning, no filters were found' in res.data | ||||
|         assert not os.path.isfile("test-datastore/notification.txt") | ||||
|         time.sleep(1) | ||||
|          | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid]['consecutive_filter_failures'] == 5 | ||||
|  | ||||
|     # We should see something in the frontend | ||||
|     assert b'Warning, no filters were found' in res.data | ||||
|  | ||||
|     # One more check should trigger it (see -2 above) | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     wait_for_all_checks(client) | ||||
|     time.sleep(2) | ||||
|     # One more check should trigger the _FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT threshold | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     wait_for_all_checks(client) | ||||
|     wait_for_notification_endpoint_output() | ||||
|  | ||||
|     # Now it should exist and contain our "filter not found" alert | ||||
|     assert os.path.isfile("test-datastore/notification.txt") | ||||
|  | ||||
|     with open("test-datastore/notification.txt", 'r') as f: | ||||
|         notification = f.read() | ||||
|  | ||||
| @@ -120,10 +140,11 @@ def run_filter_test(client, content_filter): | ||||
|     set_response_with_filter() | ||||
|  | ||||
|     # Try several times, it should NOT have 'filter not found' | ||||
|     for i in range(0, App._FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT): | ||||
|     for i in range(0, ATTEMPT_THRESHOLD_SETTING + 2): | ||||
|         client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|         wait_for_all_checks(client) | ||||
|  | ||||
|     wait_for_notification_endpoint_output() | ||||
|     # It should have sent a notification, but.. | ||||
|     assert os.path.isfile("test-datastore/notification.txt") | ||||
|     # but it should not contain the info about a failed filter (because there was none in this case) | ||||
| @@ -132,9 +153,6 @@ def run_filter_test(client, content_filter): | ||||
|     assert not 'CSS/xPath filter was not present in the page' in notification | ||||
|  | ||||
|     # Re #1247 - All tokens got replaced correctly in the notification | ||||
|     res = client.get(url_for("index")) | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     # UUID is correct, but notification contains tag uuid as UUIID wtf | ||||
|     assert uuid in notification | ||||
|  | ||||
|     # cleanup for the next | ||||
| @@ -148,14 +166,12 @@ def run_filter_test(client, content_filter): | ||||
| def test_setup(live_server): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
| def test_check_include_filters_failure_notification(client, live_server): | ||||
|     set_original_response() | ||||
|     wait_for_all_checks(client) | ||||
|     run_filter_test(client, '#nope-doesnt-exist') | ||||
| def test_check_include_filters_failure_notification(client, live_server, measure_memory_usage): | ||||
| #    live_server_setup(live_server) | ||||
|     run_filter_test(client, live_server,'#nope-doesnt-exist') | ||||
|  | ||||
| def test_check_xpath_filter_failure_notification(client, live_server): | ||||
|     set_original_response() | ||||
|     time.sleep(1) | ||||
|     run_filter_test(client, '//*[@id="nope-doesnt-exist"]') | ||||
| def test_check_xpath_filter_failure_notification(client, live_server, measure_memory_usage): | ||||
| #    live_server_setup(live_server) | ||||
|     run_filter_test(client, live_server, '//*[@id="nope-doesnt-exist"]') | ||||
|  | ||||
| # Test that notification is never sent | ||||
|   | ||||
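The reworked filter-failure test drives a per-watch `consecutive_filter_failures` counter up to the configured `filter_failure_notification_threshold_attempts` before the "filter not found" notification is expected. A conceptual sketch of that behaviour, with names mirroring what the test reads from the datastore — the exact trigger point and any counter reset are assumptions, not confirmed by this diff:

```python
def record_filter_failure(watch, app_settings, send_filter_not_found_notification):
    """Bump the per-watch failure counter; alert once it reaches the configured attempt threshold."""
    threshold = app_settings['application']['filter_failure_notification_threshold_attempts']
    watch['consecutive_filter_failures'] = watch.get('consecutive_filter_failures', 0) + 1
    if watch['consecutive_filter_failures'] >= threshold:
        send_filter_not_found_notification(watch)
```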
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| @@ -6,7 +6,7 @@ from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from | ||||
| import os | ||||
|  | ||||
|  | ||||
| def test_setup(client, live_server): | ||||
| def test_setup(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
| def set_original_response(): | ||||
| @@ -39,7 +39,7 @@ def set_modified_response(): | ||||
|         f.write(test_return_data) | ||||
|     return None | ||||
|  | ||||
| def test_setup_group_tag(client, live_server): | ||||
| def test_setup_group_tag(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|     set_original_response() | ||||
|  | ||||
| @@ -130,7 +130,7 @@ def test_setup_group_tag(client, live_server): | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
| def test_tag_import_singular(client, live_server): | ||||
| def test_tag_import_singular(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|  | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
| @@ -150,7 +150,7 @@ def test_tag_import_singular(client, live_server): | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
| def test_tag_add_in_ui(client, live_server): | ||||
| def test_tag_add_in_ui(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
| # | ||||
|     res = client.post( | ||||
| @@ -167,7 +167,7 @@ def test_tag_add_in_ui(client, live_server): | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
| def test_group_tag_notification(client, live_server): | ||||
| def test_group_tag_notification(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|     set_original_response() | ||||
|  | ||||
| @@ -235,7 +235,7 @@ def test_group_tag_notification(client, live_server): | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
| def test_limit_tag_ui(client, live_server): | ||||
| def test_limit_tag_ui(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|  | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
| @@ -273,7 +273,7 @@ def test_limit_tag_ui(client, live_server): | ||||
|     assert b'Deleted' in res.data | ||||
|     res = client.get(url_for("tags.delete_all"), follow_redirects=True) | ||||
|     assert b'All tags deleted' in res.data | ||||
| def test_clone_tag_on_import(client, live_server): | ||||
| def test_clone_tag_on_import(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
|     res = client.post( | ||||
| @@ -298,7 +298,7 @@ def test_clone_tag_on_import(client, live_server): | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
| def test_clone_tag_on_quickwatchform_add(client, live_server): | ||||
| def test_clone_tag_on_quickwatchform_add(client, live_server, measure_memory_usage): | ||||
|     #live_server_setup(live_server) | ||||
|  | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
| @@ -328,7 +328,7 @@ def test_clone_tag_on_quickwatchform_add(client, live_server): | ||||
|     res = client.get(url_for("tags.delete_all"), follow_redirects=True) | ||||
|     assert b'All tags deleted' in res.data | ||||
|  | ||||
| def test_order_of_filters_tag_filter_and_watch_filter(client, live_server): | ||||
| def test_order_of_filters_tag_filter_and_watch_filter(client, live_server, measure_memory_usage): | ||||
|  | ||||
|     # Add a tag with some config, import a tag and it should roughly work | ||||
|     res = client.post( | ||||
|   | ||||
| @@ -1,19 +1,17 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import time | ||||
| import os | ||||
| import json | ||||
| import logging | ||||
| from flask import url_for | ||||
| from .util import live_server_setup | ||||
| from .util import live_server_setup, wait_for_all_checks | ||||
| from urllib.parse import urlparse, parse_qs | ||||
|  | ||||
| def test_consistent_history(client, live_server): | ||||
| def test_consistent_history(client, live_server, measure_memory_usage): | ||||
|     live_server_setup(live_server) | ||||
|  | ||||
|     # Give the endpoint time to spin up | ||||
|     time.sleep(1) | ||||
|     r = range(1, 50) | ||||
|     r = range(1, 30) | ||||
|  | ||||
|     for one in r: | ||||
|         test_url = url_for('test_endpoint', content_type="text/html", content=str(one), _external=True) | ||||
| @@ -25,15 +23,8 @@ def test_consistent_history(client, live_server): | ||||
|  | ||||
|         assert b"1 Imported" in res.data | ||||
|  | ||||
|     time.sleep(3) | ||||
|     while True: | ||||
|         res = client.get(url_for("index")) | ||||
|         logging.debug("Waiting for 'Checking now' to go away..") | ||||
|         if b'Checking now' not in res.data: | ||||
|             break | ||||
|         time.sleep(0.5) | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     time.sleep(3) | ||||
|     # Essentially just triggers the DB write/update | ||||
|     res = client.post( | ||||
|         url_for("settings_page"), | ||||
| @@ -44,8 +35,9 @@ def test_consistent_history(client, live_server): | ||||
|     ) | ||||
|     assert b"Settings updated." in res.data | ||||
|  | ||||
|     # Give it time to write it out | ||||
|     time.sleep(3) | ||||
|  | ||||
|     time.sleep(2) | ||||
|  | ||||
|     json_db_file = os.path.join(live_server.app.config['DATASTORE'].datastore_path, 'url-watches.json') | ||||
|  | ||||
|     json_obj = None | ||||
| @@ -58,7 +50,7 @@ def test_consistent_history(client, live_server): | ||||
|     # each one should have a history.txt containing just one line | ||||
|     for w in json_obj['watching'].keys(): | ||||
|         history_txt_index_file = os.path.join(live_server.app.config['DATASTORE'].datastore_path, w, 'history.txt') | ||||
|         assert os.path.isfile(history_txt_index_file), "History.txt should exist where I expect it - {}".format(history_txt_index_file) | ||||
|         assert os.path.isfile(history_txt_index_file), f"History.txt should exist where I expect it at {history_txt_index_file}" | ||||
|  | ||||
|         # Same like in model.Watch | ||||
|         with open(history_txt_index_file, "r") as f: | ||||
| @@ -70,15 +62,20 @@ def test_consistent_history(client, live_server): | ||||
|                                                      w)) | ||||
|         # Find the snapshot one | ||||
|         for fname in files_in_watch_dir: | ||||
|             if fname != 'history.txt': | ||||
|             if fname != 'history.txt' and 'html' not in fname: | ||||
|                 # contents should match what we requested as content returned from the test url | ||||
|                 with open(os.path.join(live_server.app.config['DATASTORE'].datastore_path, w, fname), 'r') as snapshot_f: | ||||
|                     contents = snapshot_f.read() | ||||
|                     watch_url = json_obj['watching'][w]['url'] | ||||
|                     u = urlparse(watch_url) | ||||
|                     q = parse_qs(u[4]) | ||||
|                     assert q['content'][0] == contents.strip(), "Snapshot file {} should contain {}".format(fname, q['content'][0]) | ||||
|                     assert q['content'][0] == contents.strip(), f"Snapshot file {fname} should contain {q['content'][0]}" | ||||
|  | ||||
|  | ||||
|  | ||||
|         assert len(files_in_watch_dir) == 2, "Should be just two files in the dir, history.txt and the snapshot" | ||||
|         assert len(files_in_watch_dir) == 3, "Should be just three files in the dir, html.br snapshot, history.txt and the extracted text snapshot" | ||||
|  | ||||
|  | ||||
|     json_db_file = os.path.join(live_server.app.config['DATASTORE'].datastore_path, 'url-watches.json') | ||||
|     with open(json_db_file, 'r') as f: | ||||
|         assert '"default"' not in f.read(), "'default' probably shouldn't be here, it came from when the 'default' Watch vars were accidentally being saved" | ||||
|   | ||||
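The hand-rolled loop removed from `test_consistent_history` polled the index page until the "Checking now" marker disappeared; the shared `wait_for_all_checks(client)` helper replaces it. A sketch with the same behaviour (the timeout is an added safety measure, not something this diff shows):

```python
import time
from flask import url_for


def wait_for_all_checks(client, timeout=60):
    """Block until no watch on the index page is still showing 'Checking now'."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        res = client.get(url_for("index"))
        if b'Checking now' not in res.data:
            return
        time.sleep(0.5)
    raise TimeoutError(f"Watches still being checked after {timeout} seconds")
```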
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
| """Test suite for the method to extract text from an html string""" | ||||
| from ..html_tools import html_to_text | ||||
|  | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| #!/usr/bin/python3 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| from . util import live_server_setup | ||||
| from changedetectionio import html_tools | ||||
| @@ -9,8 +9,6 @@ def test_setup(live_server): | ||||
| # Unit test of the stripper | ||||
| # Always we are dealing in utf-8 | ||||
| def test_strip_regex_text_func(): | ||||
|     from ..processors import text_json_diff as fetch_site_status | ||||
|  | ||||
|     test_content = """ | ||||
|     but sometimes we want to remove the lines. | ||||
|      | ||||
|   | ||||
Some files were not shown because too many files have changed in this diff.