Compare commits
	
		
			120 Commits
		
	
	
		
			visualsele
			...
			update-pyp
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | 227b7e20ac | ||
|   | 38c4768b92 | ||
|   | 6555d99044 | ||
|   | e719dbd19b | ||
|   | b28a8316cc | ||
|   | e609a2d048 | ||
|   | 994d34c776 | ||
|   | de776800e9 | ||
|   | 8b8ed58f20 | ||
|   | 79c6d765de | ||
|   | c6db7fc90e | ||
|   | bc587efae2 | ||
|   | 6ee6be1a5f | ||
|   | c83485094b | ||
|   | 387ce32e6f | ||
|   | 6b9a788d75 | ||
|   | 14e632bc19 | ||
|   | 52c895b2e8 | ||
|   | a62043e086 | ||
|   | 3d390b6ea4 | ||
|   | 301a40ca34 | ||
|   | 1c099cdba6 | ||
|   | af747e6e3f | ||
|   | aefad0bdf6 | ||
|   | 904ef84f82 | ||
|   | d2569ba715 | ||
|   | ccb42bcb12 | ||
|   | 4163030805 | ||
|   | 140d375ad0 | ||
|   | 1a608d0ae6 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | e6ed91cfe3 | ||
|   | 008272cd77 | ||
|   | 823a0c99f4 | ||
|   | 1f57d9d0b6 | ||
|   | 3287283065 | ||
|   | c5a4e0aaa3 | ||
|   | 5119efe4fb | ||
|   | 78a2dceb81 | ||
|   | 72c7645f60 | ||
|   | e09eb47fb7 | ||
|   | 616c0b3f65 | ||
|   | c90b27823a | ||
|   | 3b16b19a94 | ||
|   | 4ee9fa79e1 | ||
|   | 4b49759113 | ||
|   | e9a9790cb0 | ||
|   | 593660e2f6 | ||
|   | 7d96b4ba83 | ||
|   | fca40e4d5b | ||
|   | 66e2dfcead | ||
|   | bce7eb68fb | ||
|   | 93c0385119 | ||
|   | e17f3be739 | ||
|   | 3a9f79b756 | ||
|   | 1f5670253e | ||
|   | fe3cf5ffd2 | ||
|   | d31a45d49a | ||
|   | 19ee65361d | ||
|   | 677082723c | ||
|   | 96793890f8 | ||
|   | 0439155127 | ||
| ![dependabot[bot]](/assets/img/avatar_default.png)  | 29ca2521eb | ||
|   | 7d67ad057c | ||
|   | 2e88872b7e | ||
|   | b30b718373 | ||
|   | 402f1e47e7 | ||
|   | 9510345e01 | ||
|   | 36085d8cf4 | ||
|   | 399cdf0fbf | ||
|   | 4be0fafa93 | ||
|   | 51ce7ac66e | ||
|   | c3d825f38c | ||
|   | fc3c4b804d | ||
|   | 1749c07750 | ||
|   | 65428655b8 | ||
|   | 8be0029260 | ||
|   | 3c727ca54b | ||
|   | 4596532090 | ||
|   | d0a88d54a1 | ||
|   | 21ab4b16a0 | ||
|   | 77133de1cf | ||
|   | 0d92be348a | ||
|   | 3ac0c9346c | ||
|   | b6d8db4c67 | ||
|   | 436a66d465 | ||
|   | 764514e5eb | ||
|   | ad3ffb6ccb | ||
|   | e051b29bf2 | ||
|   | 126852b778 | ||
|   | d115b2c858 | ||
|   | 2db04e4211 | ||
|   | 946a556fb6 | ||
|   | eda23678aa | ||
|   | 273bd45ad7 | ||
|   | 3d1e1025d2 | ||
|   | 5528b7c4b3 | ||
|   | 0dce3f4fec | ||
|   | af4311a68c | ||
|   | 792fedb8bc | ||
|   | 824748df9e | ||
|   | c086ec0d68 | ||
|   | 8e207ba438 | ||
|   | f0823126c8 | ||
|   | 98f56736c1 | ||
|   | 872bd2de85 | ||
|   | e6de1dd135 | ||
|   | 599291645d | ||
|   | 156d403552 | ||
|   | 7fe0ef7099 | ||
|   | fe70beeaed | ||
|   | abf7ed9085 | ||
|   | 19e752e9ba | ||
|   | 684e96f5f1 | ||
|   | 8f321139fd | ||
|   | 7fdae82e46 | ||
|   | bbc18d8e80 | ||
|   | d8ee5472f1 | ||
|   | 8fd57280b7 | ||
|   | 0285d00f13 | ||
|   | f7f98945a2 | 
							
								
								
									
										10
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,10 @@ | ||||
| version: 2 | ||||
| updates: | ||||
|   - package-ecosystem: github-actions | ||||
|     directory: / | ||||
|     schedule: | ||||
|       interval: "weekly" | ||||
|     groups: | ||||
|       all: | ||||
|         patterns: | ||||
|         - "*" | ||||
							
								
								
									
										2
									
								
								.github/test/Dockerfile-alpine
									
									
									
									
										vendored
									
									
								
							
							
						
						| @@ -12,8 +12,10 @@ RUN \ | ||||
|     cargo \ | ||||
|     g++ \ | ||||
|     gcc \ | ||||
|     jpeg-dev \ | ||||
|     libc-dev \ | ||||
|     libffi-dev \ | ||||
|     libjpeg \ | ||||
|     libxslt-dev \ | ||||
|     make \ | ||||
|     openssl-dev \ | ||||
|   | ||||
							
								
								
									
										6
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						| @@ -34,7 +34,7 @@ jobs: | ||||
|  | ||||
|     # Initializes the CodeQL tools for scanning. | ||||
|     - name: Initialize CodeQL | ||||
|       uses: github/codeql-action/init@v2 | ||||
|       uses: github/codeql-action/init@v3 | ||||
|       with: | ||||
|         languages: ${{ matrix.language }} | ||||
|         # If you wish to specify custom queries, you can do so here or in a config file. | ||||
| @@ -45,7 +45,7 @@ jobs: | ||||
|     # Autobuild attempts to build any compiled languages  (C/C++, C#, or Java). | ||||
|     # If this step fails, then you should remove it and run the build manually (see below) | ||||
|     - name: Autobuild | ||||
|       uses: github/codeql-action/autobuild@v2 | ||||
|       uses: github/codeql-action/autobuild@v3 | ||||
|  | ||||
|     # ℹ️ Command-line programs to run using the OS shell. | ||||
|     # 📚 https://git.io/JvXDl | ||||
| @@ -59,4 +59,4 @@ jobs: | ||||
|     #   make release | ||||
|  | ||||
|     - name: Perform CodeQL Analysis | ||||
|       uses: github/codeql-action/analyze@v2 | ||||
|       uses: github/codeql-action/analyze@v3 | ||||
|   | ||||
							
								
								
									
										2
									
								
								.github/workflows/containers.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						| @@ -41,7 +41,7 @@ jobs: | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Set up Python 3.11 | ||||
|         uses: actions/setup-python@v4 | ||||
|         uses: actions/setup-python@v5 | ||||
|         with: | ||||
|           python-version: 3.11 | ||||
|  | ||||
|   | ||||
							
								
								
									
										77
									
								
								.github/workflows/pypi-release.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,77 @@ | ||||
| name: Publish Python 🐍distribution 📦 to PyPI and TestPyPI | ||||
|  | ||||
| on: push | ||||
| jobs: | ||||
|   build: | ||||
|     name: Build distribution 📦 | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     steps: | ||||
|     - uses: actions/checkout@v4 | ||||
|     - name: Set up Python | ||||
|       uses: actions/setup-python@v5 | ||||
|       with: | ||||
|         python-version: "3.11" | ||||
|     - name: Install pypa/build | ||||
|       run: >- | ||||
|         python3 -m | ||||
|         pip install | ||||
|         build | ||||
|         --user | ||||
|     - name: Build a binary wheel and a source tarball | ||||
|       run: python3 -m build | ||||
|     - name: Store the distribution packages | ||||
|       uses: actions/upload-artifact@v4 | ||||
|       with: | ||||
|         name: python-package-distributions | ||||
|         path: dist/ | ||||
|  | ||||
|  | ||||
|   test-pypi-package: | ||||
|     name: Test the built 📦 package works basically. | ||||
|     runs-on: ubuntu-latest | ||||
|     needs: | ||||
|     - build | ||||
|     steps: | ||||
|     - name: Download all the dists | ||||
|       uses: actions/download-artifact@v4 | ||||
|       with: | ||||
|         name: python-package-distributions | ||||
|         path: dist/ | ||||
|     - name: Set up Python 3.11 | ||||
|       uses: actions/setup-python@v5 | ||||
|       with: | ||||
|         python-version: '3.11' | ||||
|     - name: Test that the basic pip built package runs without error | ||||
|       run: | | ||||
|         set -ex | ||||
|         sudo pip3 install --upgrade pip  | ||||
|         pip3 install dist/changedetection.io*.whl | ||||
|         changedetection.io -d /tmp -p 10000 & | ||||
|         sleep 3 | ||||
|         curl --retry-connrefused --retry 6 http://127.0.0.1:10000/static/styles/pure-min.css >/dev/null | ||||
|         curl --retry-connrefused --retry 6 http://127.0.0.1:10000/ >/dev/null | ||||
|         killall changedetection.io | ||||
|  | ||||
|  | ||||
|   publish-to-pypi: | ||||
|     name: >- | ||||
|       Publish Python 🐍 distribution 📦 to PyPI | ||||
|     if: startsWith(github.ref, 'refs/tags/')  # only publish to PyPI on tag pushes | ||||
|     needs: | ||||
|     - test-pypi-package | ||||
|     runs-on: ubuntu-latest | ||||
|     environment: | ||||
|       name: release | ||||
|       url: https://pypi.org/p/changedetection.io | ||||
|     permissions: | ||||
|       id-token: write  # IMPORTANT: mandatory for trusted publishing | ||||
|  | ||||
|     steps: | ||||
|     - name: Download all the dists | ||||
|       uses: actions/download-artifact@v4 | ||||
|       with: | ||||
|         name: python-package-distributions | ||||
|         path: dist/ | ||||
|     - name: Publish distribution 📦 to PyPI | ||||
|       uses: pypa/gh-action-pypi-publish@release/v1 | ||||
							
								
								
									
										4
									
								
								.github/workflows/test-container-build.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						| @@ -11,12 +11,14 @@ on: | ||||
|       - requirements.txt | ||||
|       - Dockerfile | ||||
|       - .github/workflows/* | ||||
|       - .github/test/Dockerfile* | ||||
|  | ||||
|   pull_request: | ||||
|     paths: | ||||
|       - requirements.txt | ||||
|       - Dockerfile | ||||
|       - .github/workflows/* | ||||
|       - .github/test/Dockerfile* | ||||
|  | ||||
|   # Changes to requirements.txt packages and Dockerfile may or may not always be compatible with arm etc, so worth testing | ||||
|   # @todo: some kind of path filter for requirements.txt and Dockerfile | ||||
| @@ -26,7 +28,7 @@ jobs: | ||||
|     steps: | ||||
|         - uses: actions/checkout@v4 | ||||
|         - name: Set up Python 3.11 | ||||
|           uses: actions/setup-python@v4 | ||||
|           uses: actions/setup-python@v5 | ||||
|           with: | ||||
|             python-version: 3.11 | ||||
|  | ||||
|   | ||||
							
								
								
									
										186
									
								
								.github/workflows/test-only.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						| @@ -11,7 +11,7 @@ jobs: | ||||
|  | ||||
|       # Mainly just for link/flake8 | ||||
|       - name: Set up Python 3.11 | ||||
|         uses: actions/setup-python@v4 | ||||
|         uses: actions/setup-python@v5 | ||||
|         with: | ||||
|           python-version: '3.11' | ||||
|  | ||||
| @@ -27,18 +27,18 @@ jobs: | ||||
|         run: | | ||||
|            | ||||
|           docker network create changedet-network | ||||
|  | ||||
|           # Selenium+browserless | ||||
|           docker run --network changedet-network -d --hostname selenium  -p 4444:4444 --rm --shm-size="2g"  selenium/standalone-chrome:4 | ||||
|           docker run --network changedet-network -d --name browserless --hostname browserless -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm  -p 3000:3000  --shm-size="2g"  browserless/chrome:1.60-chrome-stable | ||||
|            | ||||
|           # For accessing custom browser tests | ||||
|           docker run --network changedet-network -d --name browserless-custom-url --hostname browserless-custom-url -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm --shm-size="2g"  browserless/chrome:1.60-chrome-stable | ||||
|           # Selenium | ||||
|           docker run --network changedet-network -d --hostname selenium  -p 4444:4444 --rm --shm-size="2g"  selenium/standalone-chrome:4 | ||||
|            | ||||
|           # SocketPuppetBrowser + Extra for custom browser test | ||||
|           docker run --network changedet-network -d -e "LOG_LEVEL=TRACE" --cap-add=SYS_ADMIN --name sockpuppetbrowser --hostname sockpuppetbrowser --rm -p 3000:3000 dgtlmoon/sockpuppetbrowser:latest                     | ||||
|           docker run --network changedet-network -d -e "LOG_LEVEL=TRACE" --cap-add=SYS_ADMIN --name sockpuppetbrowser-custom-url --hostname sockpuppetbrowser-custom-url  -p 3001:3000 --rm dgtlmoon/sockpuppetbrowser:latest | ||||
|  | ||||
|       - name: Build changedetection.io container for testing | ||||
|         run: |          | ||||
|           # Build a changedetection.io container and start testing inside | ||||
|           docker build . -t test-changedetectionio | ||||
|           docker build --build-arg LOGGER_LEVEL=TRACE -t test-changedetectionio . | ||||
|           # Debug info | ||||
|           docker run test-changedetectionio  bash -c 'pip list' | ||||
|  | ||||
| @@ -47,46 +47,94 @@ jobs: | ||||
|           # Debug SMTP server/echo message back server | ||||
|           docker run --network changedet-network -d -p 11025:11025 -p 11080:11080  --hostname mailserver test-changedetectionio  bash -c 'python changedetectionio/tests/smtp/smtp-test-server.py'  | ||||
|  | ||||
|       - name: Test built container with pytest | ||||
|       - name: Show docker container state and other debug info | ||||
|         run: | | ||||
|           set -x | ||||
|           echo "Running processes in docker..." | ||||
|           docker ps | ||||
|  | ||||
|       - name: Test built container with Pytest (generally as requests/plaintext fetching) | ||||
|         run: | | ||||
|           # Unit tests | ||||
|           echo "run test with unittest" | ||||
|           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_notification_diff' | ||||
|           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_watch_model' | ||||
|            | ||||
|           # All tests | ||||
|           docker run --network changedet-network  test-changedetectionio  bash -c 'cd changedetectionio && ./run_basic_tests.sh' | ||||
|           echo "run test with pytest" | ||||
|           # The default pytest logger_level is TRACE | ||||
|           # To change logger_level for pytest(test/conftest.py), | ||||
|           # append the docker option. e.g. '-e LOGGER_LEVEL=DEBUG' | ||||
|           docker run --name test-cdio-basic-tests --network changedet-network  test-changedetectionio  bash -c 'cd changedetectionio && ./run_basic_tests.sh' | ||||
|  | ||||
|       - name: Test built container selenium+browserless/playwright | ||||
| # PLAYWRIGHT/NODE-> CDP | ||||
|       - name: Playwright and SocketPuppetBrowser - Specific tests in built container | ||||
|         run: | | ||||
|           # Playwright via Sockpuppetbrowser fetch | ||||
|           # tests/visualselector/test_fetch_data.py will do browser steps   | ||||
|           docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio  bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/fetchers/test_content.py' | ||||
|           docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio  bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_errorhandling.py' | ||||
|           docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio  bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/visualselector/test_fetch_data.py' | ||||
|           docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio  bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/fetchers/test_custom_js_before_content.py' | ||||
|  | ||||
|  | ||||
|       - name: Playwright and SocketPuppetBrowser - Headers and requests | ||||
|         run: |        | ||||
|           # Settings headers playwright tests - Call back in from Sockpuppetbrowser, check headers | ||||
|           docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000?dumpio=true" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0  --live-server-port=5004 tests/test_request.py' | ||||
|  | ||||
|       - name: Playwright and SocketPuppetBrowser - Restock detection | ||||
|         run: |                             | ||||
|           # restock detection via playwright - added name=changedet here so that playwright and sockpuppetbrowser can connect to it | ||||
|           docker run --rm --name "changedet" -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio;pytest --live-server-port=5004 --live-server-host=0.0.0.0 tests/restock/test_restock.py' | ||||
|  | ||||
| # STRAIGHT TO CDP | ||||
|       - name: Pyppeteer and SocketPuppetBrowser - Specific tests in built container | ||||
|         run: | | ||||
|           # Playwright via Sockpuppetbrowser fetch  | ||||
|           docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "FAST_PUPPETEER_CHROME_FETCHER=True" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio  bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/fetchers/test_content.py' | ||||
|           docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "FAST_PUPPETEER_CHROME_FETCHER=True" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio  bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_errorhandling.py' | ||||
|           docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "FAST_PUPPETEER_CHROME_FETCHER=True" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio  bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/visualselector/test_fetch_data.py' | ||||
|           docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "FAST_PUPPETEER_CHROME_FETCHER=True" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio  bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/fetchers/test_custom_js_before_content.py' | ||||
|  | ||||
|       - name: Pyppeteer and SocketPuppetBrowser - Headers and requests checks | ||||
|         run: |        | ||||
|           # Settings headers playwright tests - Call back in from Sockpuppetbrowser, check headers | ||||
|           docker run --name "changedet" --hostname changedet --rm  -e "FAST_PUPPETEER_CHROME_FETCHER=True" -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000?dumpio=true" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0  --live-server-port=5004 tests/test_request.py' | ||||
|  | ||||
|       - name: Pyppeteer and SocketPuppetBrowser - Restock detection | ||||
|         run: |                             | ||||
|           # restock detection via playwright - added name=changedet here so that playwright and sockpuppetbrowser can connect to it | ||||
|           docker run --rm --name "changedet" -e "FLASK_SERVER_NAME=changedet"  -e "FAST_PUPPETEER_CHROME_FETCHER=True"  -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio;pytest --live-server-port=5004 --live-server-host=0.0.0.0 tests/restock/test_restock.py' | ||||
|  | ||||
| # SELENIUM | ||||
|       - name: Specific tests in built container for Selenium | ||||
|         run: | | ||||
|            | ||||
|           # Selenium fetch | ||||
|           docker run --rm -e "WEBDRIVER_URL=http://selenium:4444/wd/hub" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py' | ||||
|            | ||||
|           # Playwright/Browserless fetch | ||||
|           docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py' | ||||
|            | ||||
|           # Settings headers playwright tests - Call back in from Browserless, check headers | ||||
|           docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0  --live-server-port=5004 tests/test_request.py' | ||||
|           docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "WEBDRIVER_URL=http://selenium:4444/wd/hub" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0  --live-server-port=5004 tests/test_request.py' | ||||
|           docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0  --live-server-port=5004 tests/test_request.py'           | ||||
|            | ||||
|           # restock detection via playwright - added name=changedet here so that playwright/browserless can connect to it | ||||
|           docker run --rm --name "changedet" -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio;pytest --live-server-port=5004 --live-server-host=0.0.0.0 tests/restock/test_restock.py' | ||||
|  | ||||
|       - name: Specific tests in built container for headers and requests checks with Selenium | ||||
|         run: | | ||||
|           docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "WEBDRIVER_URL=http://selenium:4444/wd/hub" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0  --live-server-port=5004 tests/test_request.py' | ||||
|  | ||||
| # OTHER STUFF | ||||
|       - name: Test SMTP notification mime types | ||||
|         run: | | ||||
|           # SMTP content types - needs the 'Debug SMTP server/echo message back server' container from above | ||||
|           docker run --rm  --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/smtp/test_notification_smtp.py' | ||||
|  | ||||
|       - name: Test with puppeteer fetcher and disk cache | ||||
|         run: | | ||||
|           docker run --rm -e "PUPPETEER_DISK_CACHE=/tmp/data/" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py' | ||||
|           # Browserless would have had -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" added above | ||||
|  | ||||
|       - name: Test proxy interaction | ||||
|       # @todo Add a test via playwright/puppeteer | ||||
|       # squid with auth is tested in run_proxy_tests.sh -> tests/proxy_list/test_select_custom_proxy.py | ||||
|       - name: Test proxy squid style interaction | ||||
|         run: | | ||||
|           cd changedetectionio | ||||
|           ./run_proxy_tests.sh | ||||
|           # And again with PLAYWRIGHT_DRIVER_URL=.. | ||||
|           cd .. | ||||
|  | ||||
|       - name: Test proxy SOCKS5 style interaction | ||||
|         run: | | ||||
|           cd changedetectionio | ||||
|           ./run_socks_proxy_tests.sh | ||||
|           cd .. | ||||
|  | ||||
|       - name: Test custom browser URL | ||||
| @@ -97,15 +145,79 @@ jobs: | ||||
|  | ||||
|       - name: Test changedetection.io container starts+runs basically without error | ||||
|         run: | | ||||
|           docker run -p 5556:5000 -d test-changedetectionio | ||||
|           docker run --name test-changedetectionio -p 5556:5000  -d test-changedetectionio | ||||
|           sleep 3 | ||||
|           # Should return 0 (no error) when grep finds it | ||||
|           curl -s http://localhost:5556 |grep -q checkbox-uuid | ||||
|           curl --retry-connrefused --retry 6  -s http://localhost:5556 |grep -q checkbox-uuid | ||||
|            | ||||
|           # and IPv6 | ||||
|           curl -s -g -6 "http://[::1]:5556"|grep -q checkbox-uuid | ||||
|            | ||||
|           curl --retry-connrefused --retry 6  -s -g -6 "http://[::1]:5556"|grep -q checkbox-uuid | ||||
|  | ||||
| #export WEBDRIVER_URL=http://localhost:4444/wd/hub | ||||
| #pytest tests/fetchers/test_content.py | ||||
| #pytest tests/test_errorhandling.py | ||||
|           # Check whether TRACE log is enabled. | ||||
|           # Also, check whether TRACE is came from STDERR | ||||
|           docker logs test-changedetectionio 2>&1 1>/dev/null | grep 'TRACE log is enabled' || exit 1 | ||||
|           # Check whether DEBUG is came from STDOUT | ||||
|           docker logs test-changedetectionio 2>/dev/null | grep 'DEBUG' || exit 1 | ||||
|  | ||||
|           docker kill test-changedetectionio | ||||
|  | ||||
|       - name: Test changedetection.io SIGTERM and SIGINT signal shutdown | ||||
|         run: | | ||||
|            | ||||
|           echo SIGINT Shutdown request test | ||||
|           docker run --name sig-test -d test-changedetectionio | ||||
|           sleep 3 | ||||
|           echo ">>> Sending SIGINT to sig-test container" | ||||
|           docker kill --signal=SIGINT sig-test | ||||
|           sleep 3 | ||||
|           # invert the check (it should be not 0/not running) | ||||
|           docker ps | ||||
|           # check signal catch(STDERR) log. Because of | ||||
|           # changedetectionio/__init__.py: logger.add(sys.stderr, level=logger_level) | ||||
|           docker logs sig-test 2>&1 | grep 'Shutdown: Got Signal - SIGINT' || exit 1 | ||||
|           test -z "`docker ps|grep sig-test`" | ||||
|           if [ $? -ne 0 ] | ||||
|           then | ||||
|             echo "Looks like container was running when it shouldnt be" | ||||
|             docker ps | ||||
|             exit 1 | ||||
|           fi | ||||
|            | ||||
|           # @todo - scan the container log to see the right "graceful shutdown" text exists  | ||||
|           docker rm sig-test | ||||
|            | ||||
|           echo SIGTERM Shutdown request test | ||||
|           docker run --name sig-test -d test-changedetectionio | ||||
|           sleep 3 | ||||
|           echo ">>> Sending SIGTERM to sig-test container" | ||||
|           docker kill --signal=SIGTERM sig-test | ||||
|           sleep 3 | ||||
|           # invert the check (it should be not 0/not running) | ||||
|           docker ps | ||||
|           # check signal catch(STDERR) log. Because of | ||||
|           # changedetectionio/__init__.py: logger.add(sys.stderr, level=logger_level) | ||||
|           docker logs sig-test 2>&1 | grep 'Shutdown: Got Signal - SIGTERM' || exit 1 | ||||
|           test -z "`docker ps|grep sig-test`" | ||||
|           if [ $? -ne 0 ] | ||||
|           then | ||||
|             echo "Looks like container was running when it shouldnt be" | ||||
|             docker ps | ||||
|             exit 1 | ||||
|           fi | ||||
|            | ||||
|           # @todo - scan the container log to see the right "graceful shutdown" text exists            | ||||
|           docker rm sig-test | ||||
|  | ||||
|       - name: Dump container log | ||||
|         if: always() | ||||
|         run: | | ||||
|           mkdir output-logs | ||||
|           docker logs test-cdio-basic-tests > output-logs/test-cdio-basic-tests-stdout.txt | ||||
|           docker logs test-cdio-basic-tests 2> output-logs/test-cdio-basic-tests-stderr.txt | ||||
|  | ||||
|       - name: Store container log | ||||
|         if: always() | ||||
|         uses: actions/upload-artifact@v4 | ||||
|         with: | ||||
|           name: test-cdio-basic-tests-output | ||||
|           path: output-logs | ||||
|   | ||||
							
								
								
									
										36
									
								
								.github/workflows/test-pip-build.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						| @@ -1,36 +0,0 @@ | ||||
| name: ChangeDetection.io PIP package test | ||||
|  | ||||
| # Triggers the workflow on push or pull request events | ||||
|  | ||||
| # This line doesnt work, even tho it is the documented one | ||||
| on: [push, pull_request] | ||||
|  | ||||
|   # Changes to requirements.txt packages and Dockerfile may or may not always be compatible with arm etc, so worth testing | ||||
|   # @todo: some kind of path filter for requirements.txt and Dockerfile | ||||
| jobs: | ||||
|   test-pip-build-basics: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|         - uses: actions/checkout@v4 | ||||
|  | ||||
|         - name: Set up Python 3.11 | ||||
|           uses: actions/setup-python@v4 | ||||
|           with: | ||||
|             python-version: 3.11 | ||||
|  | ||||
|  | ||||
|         - name: Test that the basic pip built package runs without error | ||||
|           run: | | ||||
|             set -e | ||||
|             mkdir dist | ||||
|             pip3 install wheel | ||||
|             python3 setup.py bdist_wheel             | ||||
|             pip3 install -r requirements.txt | ||||
|             rm ./changedetection.py | ||||
|             rm -rf changedetectio | ||||
|              | ||||
|             pip3 install dist/changedetection.io*.whl | ||||
|             changedetection.io -d /tmp -p 10000 & | ||||
|             sleep 3 | ||||
|             curl http://127.0.0.1:10000/static/styles/pure-min.css >/dev/null | ||||
|             killall -9 changedetection.io | ||||
							
								
								
									
										24
									
								
								Dockerfile
									
									
									
									
									
								
							
							
						
						| @@ -1,5 +1,8 @@ | ||||
| # pip dependencies install stage | ||||
| FROM python:3.11-slim-bookworm as builder | ||||
|  | ||||
| # @NOTE! I would love to move to 3.11 but it breaks the async handler in changedetectionio/content_fetchers/puppeteer.py | ||||
| #        If you know how to fix it, please do! and test it for both 3.10 and 3.11 | ||||
| FROM python:3.10-slim-bookworm as builder | ||||
|  | ||||
| # See `cryptography` pin comment in requirements.txt | ||||
| ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1 | ||||
| @@ -25,11 +28,11 @@ RUN pip install --target=/dependencies -r /requirements.txt | ||||
| # Playwright is an alternative to Selenium | ||||
| # Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing | ||||
| # https://github.com/dgtlmoon/changedetection.io/pull/1067 also musl/alpine (not supported) | ||||
| RUN pip install --target=/dependencies playwright~=1.39 \ | ||||
| RUN pip install --target=/dependencies playwright~=1.41.2 \ | ||||
|     || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled." | ||||
|  | ||||
| # Final image stage | ||||
| FROM python:3.11-slim-bookworm | ||||
| FROM python:3.10-slim-bookworm | ||||
|  | ||||
| RUN apt-get update && apt-get install -y --no-install-recommends \ | ||||
|     libxslt1.1 \ | ||||
| @@ -53,12 +56,17 @@ ENV PYTHONPATH=/usr/local | ||||
|  | ||||
| EXPOSE 5000 | ||||
|  | ||||
| # The actual flask app | ||||
| # The actual flask app module | ||||
| COPY changedetectionio /app/changedetectionio | ||||
|  | ||||
| # The eventlet server wrapper | ||||
| # Starting wrapper | ||||
| COPY changedetection.py /app/changedetection.py | ||||
|  | ||||
| WORKDIR /app | ||||
| # Github Action test purpose(test-only.yml). | ||||
| # On production, it is effectively LOGGER_LEVEL=''. | ||||
| ARG LOGGER_LEVEL='' | ||||
| ENV LOGGER_LEVEL "$LOGGER_LEVEL" | ||||
|  | ||||
| WORKDIR /app | ||||
| CMD ["python", "./changedetection.py", "-d", "/datastore"] | ||||
|  | ||||
|  | ||||
| CMD [ "python", "./changedetection.py" , "-d", "/datastore"] | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| recursive-include changedetectionio/api * | ||||
| recursive-include changedetectionio/blueprint * | ||||
| recursive-include changedetectionio/content_fetchers * | ||||
| recursive-include changedetectionio/model * | ||||
| recursive-include changedetectionio/processors * | ||||
| recursive-include changedetectionio/res * | ||||
| recursive-include changedetectionio/static * | ||||
| recursive-include changedetectionio/templates * | ||||
| recursive-include changedetectionio/tests * | ||||
| @@ -10,6 +10,8 @@ prune changedetectionio/static/package-lock.json | ||||
| prune changedetectionio/static/styles/node_modules | ||||
| prune changedetectionio/static/styles/package-lock.json | ||||
| include changedetection.py | ||||
| include requirements.txt | ||||
| include README-pip.md | ||||
| global-exclude *.pyc | ||||
| global-exclude node_modules | ||||
| global-exclude venv | ||||
|   | ||||
							
								
								
									
										27
									
								
								README.md
									
									
									
									
									
								
							
							
						
						| @@ -11,12 +11,13 @@ _Live your data-life pro-actively._ | ||||
|  | ||||
|  | ||||
|  | ||||
| [**Don't have time? Let us host it for you! try our $8.99/month subscription - use our proxies and support!**](https://changedetection.io) , _half the price of other website change monitoring services!_ | ||||
| [**Get started with website page change monitoring straight away. Don't have time? Try our $8.99/month subscription, use our proxies and support!**](https://changedetection.io) , _half the price of other website change monitoring services!_ | ||||
|  | ||||
| - Chrome browser included. | ||||
| - Nothing to install, access via browser login after signup. | ||||
| - Super fast, no registration needed setup. | ||||
| - Get started watching and receiving website change notifications straight away. | ||||
|  | ||||
| - See our [tutorials and how-to page for more inspiration](https://changedetection.io/tutorials)  | ||||
|  | ||||
| ### Target specific parts of the webpage using the Visual Selector tool. | ||||
|  | ||||
| @@ -90,6 +91,14 @@ We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) glob | ||||
|  | ||||
| Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/ | ||||
|  | ||||
| ### We have a Chrome extension! | ||||
|  | ||||
| Easily add the current web page to your changedetection.io tool, simply install the extension and click "Sync" to connect it to your existing changedetection.io install. | ||||
|  | ||||
| [<img src="./docs/chrome-extension-screenshot.png" style="max-width:80%;" alt="Chrome Extension to easily add the current web-page to detect a change."  title="Chrome Extension to easily add the current web-page to detect a change."  />](https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop) | ||||
|  | ||||
| [Goto the Chrome Webstore to download the extension.](https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop) | ||||
|  | ||||
| ## Installation | ||||
|  | ||||
| ### Docker | ||||
| @@ -97,7 +106,7 @@ Please :star: star :star: this project and help it grow! https://github.com/dgtl | ||||
| With Docker composer, just clone this repository and.. | ||||
|  | ||||
| ```bash | ||||
| $ docker-compose up -d | ||||
| $ docker compose up -d | ||||
| ``` | ||||
|  | ||||
| Docker standalone | ||||
| @@ -136,10 +145,10 @@ docker rm $(docker ps -a -f name=changedetection.io -q) | ||||
| docker run -d --restart always -p "127.0.0.1:5000:5000" -v datastore-volume:/datastore --name changedetection.io dgtlmoon/changedetection.io | ||||
| ``` | ||||
|  | ||||
| ### docker-compose | ||||
| ### docker compose | ||||
|  | ||||
| ```bash | ||||
| docker-compose pull && docker-compose up -d | ||||
| docker compose pull && docker compose up -d | ||||
| ``` | ||||
|  | ||||
| See the wiki for more information https://github.com/dgtlmoon/changedetection.io/wiki | ||||
| @@ -248,7 +257,7 @@ Supports managing the website watch list [via our API](https://changedetection.i | ||||
| Do you use changedetection.io to make money? does it save you time or money? Does it make your life easier? less stressful? Remember, we write this software when we should be doing actual paid work, we have to buy food and pay rent just like you. | ||||
|  | ||||
|  | ||||
| Firstly, consider taking out a [change detection monthly subscription - unlimited checks and watches](https://changedetection.io?src=github) , even if you don't use it, you still get the warm fuzzy feeling of helping out the project. (And who knows, you might just use it!) | ||||
| Firstly, consider taking out an officially supported [website change detection subscription](https://changedetection.io?src=github) , even if you don't use it, you still get the warm fuzzy feeling of helping out the project. (And who knows, you might just use it!) | ||||
|  | ||||
| Or directly donate an amount PayPal [](https://www.paypal.com/donate/?hosted_button_id=7CP6HR9ZCNDYJ) | ||||
|  | ||||
| @@ -272,3 +281,9 @@ I offer commercial support, this software is depended on by network security, ae | ||||
| ## Third-party licenses | ||||
|  | ||||
| changedetectionio.html_tools.elementpath_tostring: Copyright (c), 2018-2021, SISSA (Scuola Internazionale Superiore di Studi Avanzati), Licensed under [MIT license](https://github.com/sissaschool/elementpath/blob/master/LICENSE) | ||||
|  | ||||
| ## Contributors | ||||
|  | ||||
| Recognition of fantastic contributors to the project | ||||
|  | ||||
| - Constantin Hong https://github.com/Constantin1489 | ||||
|   | ||||
							
								
								
									
										21
									
								
								app.json
									
									
									
									
									
								
							
							
						
						| @@ -1,21 +0,0 @@ | ||||
| { | ||||
|   "name": "ChangeDetection.io", | ||||
|   "description": "The best and simplest self-hosted open source website change detection monitoring and notification service.", | ||||
|   "keywords": [ | ||||
|     "changedetection", | ||||
|     "website monitoring" | ||||
|   ], | ||||
|   "repository": "https://github.com/dgtlmoon/changedetection.io", | ||||
|   "success_url": "/", | ||||
|   "scripts": { | ||||
|   }, | ||||
|   "env": { | ||||
|   }, | ||||
|   "formation": { | ||||
|     "web": { | ||||
|       "quantity": 1, | ||||
|       "size": "free" | ||||
|     } | ||||
|   }, | ||||
|   "image": "heroku/python" | ||||
| } | ||||
| @@ -1,44 +1,6 @@ | ||||
| #!/usr/bin/python3 | ||||
|  | ||||
| # Entry-point for running from the CLI when not installed via Pip, Pip will handle the console_scripts entry_points's from setup.py | ||||
| # It's recommended to use `pip3 install changedetection.io` and start with `changedetection.py` instead, it will be linkd to your global path. | ||||
| # or Docker. | ||||
| # Read more https://github.com/dgtlmoon/changedetection.io/wiki | ||||
| # Only exists for direct CLI usage | ||||
|  | ||||
| from changedetectionio import changedetection | ||||
| import multiprocessing | ||||
| import sys | ||||
| import os | ||||
|  | ||||
| def sigchld_handler(_signo, _stack_frame): | ||||
|     import sys | ||||
|     print('Shutdown: Got SIGCHLD') | ||||
|     # https://stackoverflow.com/questions/40453496/python-multiprocessing-capturing-signals-to-restart-child-processes-or-shut-do | ||||
|     pid, status = os.waitpid(-1, os.WNOHANG | os.WUNTRACED | os.WCONTINUED) | ||||
|  | ||||
|     print('Sub-process: pid %d status %d' % (pid, status)) | ||||
|     if status != 0: | ||||
|         sys.exit(1) | ||||
|  | ||||
|     raise SystemExit | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|  | ||||
|     #signal.signal(signal.SIGCHLD, sigchld_handler) | ||||
|  | ||||
|     # The only way I could find to get Flask to shutdown, is to wrap it and then rely on the subsystem issuing SIGTERM/SIGKILL | ||||
|     parse_process = multiprocessing.Process(target=changedetection.main) | ||||
|     parse_process.daemon = True | ||||
|     parse_process.start() | ||||
|     import time | ||||
|  | ||||
|     try: | ||||
|         while True: | ||||
|             time.sleep(1) | ||||
|             if not parse_process.is_alive(): | ||||
|                 # Process died/crashed for some reason, exit with error set | ||||
|                 sys.exit(1) | ||||
|  | ||||
|     except KeyboardInterrupt: | ||||
|         #parse_process.terminate() not needed, because this process will issue it to the sub-process anyway | ||||
|         print ("Exited - CTRL+C") | ||||
| import changedetectionio | ||||
| changedetectionio.main() | ||||
|   | ||||
| @@ -30,7 +30,7 @@ class Watch(Resource): | ||||
|         self.update_q = kwargs['update_q'] | ||||
|  | ||||
|     # Get information about a single watch, excluding the history list (can be large) | ||||
|     # curl http://localhost:4000/api/v1/watch/<string:uuid> | ||||
|     # curl http://localhost:5000/api/v1/watch/<string:uuid> | ||||
|     # @todo - version2 - ?muted and ?paused should be able to be called together, return the watch struct not "OK" | ||||
|     # ?recheck=true | ||||
|     @auth.check_token | ||||
| @@ -39,9 +39,9 @@ class Watch(Resource): | ||||
|         @api {get} /api/v1/watch/:uuid Single watch - get data, recheck, pause, mute. | ||||
|         @apiDescription Retrieve watch information and set muted/paused status | ||||
|         @apiExample {curl} Example usage: | ||||
|             curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091  -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             curl "http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091?muted=unmuted"  -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             curl "http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091?paused=unpaused"  -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091  -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             curl "http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091?muted=unmuted"  -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             curl "http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091?paused=unpaused"  -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|         @apiName Watch | ||||
|         @apiGroup Watch | ||||
|         @apiParam {uuid} uuid Watch unique ID. | ||||
| @@ -76,7 +76,7 @@ class Watch(Resource): | ||||
|         # Properties are not returned as a JSON, so add the required props manually | ||||
|         watch['history_n'] = watch.history_n | ||||
|         watch['last_changed'] = watch.last_changed | ||||
|  | ||||
|         watch['viewed'] = watch.viewed | ||||
|         return watch | ||||
|  | ||||
|     @auth.check_token | ||||
| @@ -84,7 +84,7 @@ class Watch(Resource): | ||||
|         """ | ||||
|         @api {delete} /api/v1/watch/:uuid Delete a watch and related history | ||||
|         @apiExample {curl} Example usage: | ||||
|             curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X DELETE -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X DELETE -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|         @apiParam {uuid} uuid Watch unique ID. | ||||
|         @apiName Delete | ||||
|         @apiGroup Watch | ||||
| @@ -103,7 +103,7 @@ class Watch(Resource): | ||||
|         @api {put} /api/v1/watch/:uuid Update watch information | ||||
|         @apiExample {curl} Example usage: | ||||
|             Update (PUT) | ||||
|             curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X PUT -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"url": "https://my-nice.com" , "tag": "new list"}' | ||||
|             curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091 -X PUT -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"url": "https://my-nice.com" , "tag": "new list"}' | ||||
|  | ||||
|         @apiDescription Updates an existing watch using JSON, accepts the same structure as returned in <a href="#api-Watch-Watch">get single watch information</a> | ||||
|         @apiParam {uuid} uuid Watch unique ID. | ||||
| @@ -132,13 +132,14 @@ class WatchHistory(Resource): | ||||
|         self.datastore = kwargs['datastore'] | ||||
|  | ||||
|     # Get a list of available history for a watch by UUID | ||||
|     # curl http://localhost:4000/api/v1/watch/<string:uuid>/history | ||||
|     # curl http://localhost:5000/api/v1/watch/<string:uuid>/history | ||||
|     @auth.check_token | ||||
|     def get(self, uuid): | ||||
|         """ | ||||
|         @api {get} /api/v1/watch/<string:uuid>/history Get a list of all historical snapshots available for a watch | ||||
|         @apiDescription Requires `uuid`, returns list | ||||
|         @apiExample {curl} Example usage: | ||||
|             curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" | ||||
|             curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" | ||||
|             { | ||||
|                 "1676649279": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/cb7e9be8258368262246910e6a2a4c30.txt", | ||||
|                 "1677092785": "/tmp/data/6a4b7d5c-fee4-4616-9f43-4ac97046b595/e20db368d6fc633e34f559ff67bb4044.txt", | ||||
| @@ -166,7 +167,7 @@ class WatchSingleHistory(Resource): | ||||
|         @api {get} /api/v1/watch/<string:uuid>/history/<int:timestamp> Get single snapshot from watch | ||||
|         @apiDescription Requires watch `uuid` and `timestamp`. `timestamp` of "`latest`" for latest available snapshot, or <a href="#api-Watch_History-Get_list_of_available_stored_snapshots_for_watch">use the list returned here</a> | ||||
|         @apiExample {curl} Example usage: | ||||
|             curl http://localhost:4000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history/1677092977 -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" | ||||
|             curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history/1677092977 -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" | ||||
|         @apiName Get single snapshot content | ||||
|         @apiGroup Watch History | ||||
|         @apiSuccess (200) {String} OK | ||||
| @@ -202,7 +203,7 @@ class CreateWatch(Resource): | ||||
|         @api {post} /api/v1/watch Create a single watch | ||||
|         @apiDescription Requires atleast `url` set, can accept the same structure as <a href="#api-Watch-Watch">get single watch information</a> to create. | ||||
|         @apiExample {curl} Example usage: | ||||
|             curl http://localhost:4000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"url": "https://my-nice.com" , "tag": "nice list"}' | ||||
|             curl http://localhost:5000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json" -d '{"url": "https://my-nice.com" , "tag": "nice list"}' | ||||
|         @apiName Create | ||||
|         @apiGroup Watch | ||||
|         @apiSuccess (200) {String} OK Was created | ||||
| @@ -245,7 +246,7 @@ class CreateWatch(Resource): | ||||
|         @api {get} /api/v1/watch List watches | ||||
|         @apiDescription Return concise list of available watches and some very basic info | ||||
|         @apiExample {curl} Example usage: | ||||
|             curl http://localhost:4000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             curl http://localhost:5000/api/v1/watch -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             { | ||||
|                 "6a4b7d5c-fee4-4616-9f43-4ac97046b595": { | ||||
|                     "last_changed": 1677103794, | ||||
| @@ -280,11 +281,14 @@ class CreateWatch(Resource): | ||||
|             if tag_limit and not any(v.get('title').lower() == tag_limit for k, v in tags.items()): | ||||
|                 continue | ||||
|  | ||||
|             list[uuid] = {'url': watch['url'], | ||||
|                        'title': watch['title'], | ||||
|                        'last_checked': watch['last_checked'], | ||||
|                        'last_changed': watch.last_changed, | ||||
|                        'last_error': watch['last_error']} | ||||
|             list[uuid] = { | ||||
|                 'last_changed': watch.last_changed, | ||||
|                 'last_checked': watch['last_checked'], | ||||
|                 'last_error': watch['last_error'], | ||||
|                 'title': watch['title'], | ||||
|                 'url': watch['url'], | ||||
|                 'viewed': watch.viewed | ||||
|             } | ||||
|  | ||||
|         if request.args.get('recheck_all'): | ||||
|             for uuid in self.datastore.data['watching'].keys(): | ||||
| @@ -293,6 +297,61 @@ class CreateWatch(Resource): | ||||
|  | ||||
|         return list, 200 | ||||
|  | ||||
| class Import(Resource): | ||||
|     def __init__(self, **kwargs): | ||||
|         # datastore is a black box dependency | ||||
|         self.datastore = kwargs['datastore'] | ||||
|  | ||||
|     @auth.check_token | ||||
|     def post(self): | ||||
|         """ | ||||
|         @api {post} /api/v1/import Import a list of watched URLs | ||||
|         @apiDescription Accepts a line-feed separated list of URLs to import, additionally with ?tag_uuids=(tag  id), ?tag=(name), ?proxy={key}, ?dedupe=true (default true) one URL per line. | ||||
|         @apiExample {curl} Example usage: | ||||
|             curl http://localhost:5000/api/v1/import --data-binary @list-of-sites.txt -H"x-api-key:8a111a21bc2f8f1dd9b9353bbd46049a" | ||||
|         @apiName Import | ||||
|         @apiGroup Watch | ||||
|         @apiSuccess (200) {List} OK List of watch UUIDs added | ||||
|         @apiSuccess (500) {String} ERR Some other error | ||||
|         """ | ||||
|  | ||||
|         extras = {} | ||||
|  | ||||
|         if request.args.get('proxy'): | ||||
|             plist = self.datastore.proxy_list | ||||
|             if not request.args.get('proxy') in plist: | ||||
|                 return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400 | ||||
|             else: | ||||
|                 extras['proxy'] = request.args.get('proxy') | ||||
|  | ||||
|         dedupe = strtobool(request.args.get('dedupe', 'true')) | ||||
|  | ||||
|         tags = request.args.get('tag') | ||||
|         tag_uuids = request.args.get('tag_uuids') | ||||
|  | ||||
|         if tag_uuids: | ||||
|             tag_uuids = tag_uuids.split(',') | ||||
|  | ||||
|         urls = request.get_data().decode('utf8').splitlines() | ||||
|         added = [] | ||||
|         allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False')) | ||||
|         for url in urls: | ||||
|             url = url.strip() | ||||
|             if not len(url): | ||||
|                 continue | ||||
|  | ||||
|             # If hosts that only contain alphanumerics are allowed ("localhost" for example) | ||||
|             if not validators.url(url, simple_host=allow_simplehost): | ||||
|                 return f"Invalid or unsupported URL - {url}", 400 | ||||
|  | ||||
|             if dedupe and self.datastore.url_exists(url): | ||||
|                 continue | ||||
|  | ||||
|             new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags, tag_uuids=tag_uuids) | ||||
|             added.append(new_uuid) | ||||
|  | ||||
|         return added | ||||
|  | ||||
| class SystemInfo(Resource): | ||||
|     def __init__(self, **kwargs): | ||||
|         # datastore is a black box dependency | ||||
| @@ -305,7 +364,7 @@ class SystemInfo(Resource): | ||||
|         @api {get} /api/v1/systeminfo Return system info | ||||
|         @apiDescription Return some info about the current system state | ||||
|         @apiExample {curl} Example usage: | ||||
|             curl http://localhost:4000/api/v1/systeminfo -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             curl http://localhost:5000/api/v1/systeminfo -H"x-api-key:813031b16330fe25e3780cf0325daa45" | ||||
|             HTTP/1.0 200 | ||||
|             { | ||||
|                 'queue_size': 10 , | ||||
|   | ||||
							
								
								
									
										7
									
								
								changedetectionio/blueprint/browser_steps/TODO.txt
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,7 @@ | ||||
| - This needs an abstraction to directly handle the puppeteer connection methods | ||||
| - Then remove the playwright stuff | ||||
| - Remove hack redirect at line 65 changedetectionio/processors/__init__.py | ||||
|  | ||||
| The screenshots are base64 encoded/decoded which is very CPU intensive for large screenshots (in playwright) but not | ||||
| in the direct puppeteer connection (they are binary end to end) | ||||
|  | ||||
| @@ -4,31 +4,21 @@ | ||||
| # Why? | ||||
| # `browsersteps_playwright_browser_interface.chromium.connect_over_cdp()` will only run once without async() | ||||
| # - this flask app is not async() | ||||
| # - browserless has a single timeout/keepalive which applies to the session made at .connect_over_cdp() | ||||
| # - A single timeout/keepalive which applies to the session made at .connect_over_cdp() | ||||
| # | ||||
| # So it means that we must unfortunately for now just keep a single timer since .connect_over_cdp() was run | ||||
| # and know when that reaches timeout/keepalive :( when that time is up, restart the connection and tell the user | ||||
| # that their time is up, insert another coin. (reload) | ||||
| # | ||||
| # Bigger picture | ||||
| # - It's horrible that we have this click+wait deal, some nice socket.io solution using something similar | ||||
| # to what the browserless debug UI already gives us would be smarter.. | ||||
| # | ||||
| # OR | ||||
| # - Some API call that should be hacked into browserless or playwright that we can "/api/bump-keepalive/{session_id}/60" | ||||
| # So we can tell it that we need more time (run this on each action) | ||||
| # | ||||
| # OR | ||||
| # - use multiprocessing to bump this over to its own process and add some transport layer (queue/pipes) | ||||
|  | ||||
| from distutils.util import strtobool | ||||
| from flask import Blueprint, request, make_response | ||||
| import logging | ||||
| import os | ||||
| import re | ||||
|  | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio import login_optionally_required | ||||
| from changedetectionio.flask_app import login_optionally_required | ||||
| from loguru import logger | ||||
|  | ||||
| browsersteps_sessions = {} | ||||
| io_interface_context = None | ||||
| @@ -59,7 +49,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|             io_interface_context = io_interface_context.start() | ||||
|  | ||||
|         keepalive_ms = ((keepalive_seconds + 3) * 1000) | ||||
|         base_url = os.getenv('PLAYWRIGHT_DRIVER_URL', '') | ||||
|         base_url = os.getenv('PLAYWRIGHT_DRIVER_URL', '').strip('"') | ||||
|         a = "?" if not '?' in base_url else '&' | ||||
|         base_url += a + f"timeout={keepalive_ms}" | ||||
|  | ||||
| @@ -89,7 +79,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|                 if parsed.password: | ||||
|                     proxy['password'] = parsed.password | ||||
|  | ||||
|                 print("Browser Steps: UUID {} selected proxy {}".format(watch_uuid, proxy_url)) | ||||
|                 logger.debug(f"Browser Steps: UUID {watch_uuid} selected proxy {proxy_url}") | ||||
|  | ||||
|         # Tell Playwright to connect to Chrome and setup a new session via our stepper interface | ||||
|         browsersteps_start_session['browserstepper'] = browser_steps.browsersteps_live_ui( | ||||
| @@ -116,10 +106,10 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|         if not watch_uuid: | ||||
|             return make_response('No Watch UUID specified', 500) | ||||
|  | ||||
|         print("Starting connection with playwright") | ||||
|         logging.debug("browser_steps.py connecting") | ||||
|         logger.debug("Starting connection with playwright") | ||||
|         logger.debug("browser_steps.py connecting") | ||||
|         browsersteps_sessions[browsersteps_session_id] = start_browsersteps_session(watch_uuid) | ||||
|         print("Starting connection with playwright - done") | ||||
|         logger.debug("Starting connection with playwright - done") | ||||
|         return {'browsersteps_session_id': browsersteps_session_id} | ||||
|  | ||||
|     @login_optionally_required | ||||
| @@ -152,7 +142,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|     @browser_steps_blueprint.route("/browsersteps_update", methods=['POST']) | ||||
|     def browsersteps_ui_update(): | ||||
|         import base64 | ||||
|         import playwright._impl._api_types | ||||
|         import playwright._impl._errors | ||||
|         global browsersteps_sessions | ||||
|         from changedetectionio.blueprint.browser_steps import browser_steps | ||||
|  | ||||
| @@ -190,7 +180,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|                                          optional_value=step_optional_value) | ||||
|  | ||||
|             except Exception as e: | ||||
|                 print("Exception when calling step operation", step_operation, str(e)) | ||||
|                 logger.error(f"Exception when calling step operation {step_operation} {str(e)}") | ||||
|                 # Try to find something of value to give back to the user | ||||
|                 return make_response(str(e).splitlines()[0], 401) | ||||
|  | ||||
|   | ||||
| @@ -4,6 +4,9 @@ import os | ||||
| import time | ||||
| import re | ||||
| from random import randint | ||||
| from loguru import logger | ||||
|  | ||||
| from changedetectionio.content_fetchers.base import manage_user_agent | ||||
|  | ||||
| # Two flags, tell the JS which of the "Selector" or "Value" field should be enabled in the front end | ||||
| # 0- off, 1- on | ||||
| @@ -53,7 +56,7 @@ class steppable_browser_interface(): | ||||
|         if call_action_name == 'choose_one': | ||||
|             return | ||||
|  | ||||
|         print("> action calling", call_action_name) | ||||
|         logger.debug(f"> Action calling '{call_action_name}'") | ||||
|         # https://playwright.dev/python/docs/selectors#xpath-selectors | ||||
|         if selector and selector.startswith('/') and not selector.startswith('//'): | ||||
|             selector = "xpath=" + selector | ||||
| @@ -72,7 +75,7 @@ class steppable_browser_interface(): | ||||
|  | ||||
|         action_handler(selector, optional_value) | ||||
|         self.page.wait_for_timeout(1.5 * 1000) | ||||
|         print("Call action done in", time.time() - now) | ||||
|         logger.debug(f"Call action done in {time.time()-now:.2f}s") | ||||
|  | ||||
|     def action_goto_url(self, selector=None, value=None): | ||||
|         # self.page.set_viewport_size({"width": 1280, "height": 5000}) | ||||
| @@ -82,7 +85,7 @@ class steppable_browser_interface(): | ||||
|         #and also wait for seconds ? | ||||
|         #await page.waitForTimeout(1000); | ||||
|         #await page.waitForTimeout(extra_wait_ms); | ||||
|         print("Time to goto URL ", time.time() - now) | ||||
|         logger.debug(f"Time to goto URL {time.time()-now:.2f}s") | ||||
|         return response | ||||
|  | ||||
|     def action_click_element_containing_text(self, selector=None, value=''): | ||||
| @@ -103,15 +106,15 @@ class steppable_browser_interface(): | ||||
|         return response | ||||
|  | ||||
|     def action_click_element(self, selector, value): | ||||
|         print("Clicking element") | ||||
|         logger.debug("Clicking element") | ||||
|         if not len(selector.strip()): | ||||
|             return | ||||
|  | ||||
|         self.page.click(selector=selector, timeout=30 * 1000, delay=randint(200, 500)) | ||||
|  | ||||
|     def action_click_element_if_exists(self, selector, value): | ||||
|         import playwright._impl._api_types as _api_types | ||||
|         print("Clicking element if exists") | ||||
|         import playwright._impl._errors as _api_types | ||||
|         logger.debug("Clicking element if exists") | ||||
|         if not len(selector.strip()): | ||||
|             return | ||||
|         try: | ||||
| @@ -123,6 +126,9 @@ class steppable_browser_interface(): | ||||
|             return | ||||
|  | ||||
|     def action_click_x_y(self, selector, value): | ||||
|         if not re.match(r'^\s?\d+\s?,\s?\d+\s?$', value): | ||||
|             raise Exception("'Click X,Y' step should be in the format of '100 , 90'") | ||||
|  | ||||
|         x, y = value.strip().split(',') | ||||
|         x = int(float(x.strip())) | ||||
|         y = int(float(y.strip())) | ||||
| @@ -165,7 +171,7 @@ class steppable_browser_interface(): | ||||
|         self.page.locator(selector, timeout=1000).uncheck(timeout=1000) | ||||
|  | ||||
|  | ||||
| # Responsible for maintaining a live 'context' with browserless | ||||
| # Responsible for maintaining a live 'context' with the chrome CDP | ||||
| # @todo - how long do contexts live for anyway? | ||||
| class browsersteps_live_ui(steppable_browser_interface): | ||||
|     context = None | ||||
| @@ -174,6 +180,7 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|     stale = False | ||||
|     # bump and kill this if idle after X sec | ||||
|     age_start = 0 | ||||
|     headers = {} | ||||
|  | ||||
|     # use a special driver, maybe locally etc | ||||
|     command_executor = os.getenv( | ||||
| @@ -188,7 +195,8 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|  | ||||
|     browser_type = os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').strip('"') | ||||
|  | ||||
|     def __init__(self, playwright_browser, proxy=None): | ||||
|     def __init__(self, playwright_browser, proxy=None, headers=None): | ||||
|         self.headers = headers or {} | ||||
|         self.age_start = time.time() | ||||
|         self.playwright_browser = playwright_browser | ||||
|         if self.context is None: | ||||
| @@ -202,16 +210,17 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|  | ||||
|         # @todo handle multiple contexts, bind a unique id from the browser on each req? | ||||
|         self.context = self.playwright_browser.new_context( | ||||
|             # @todo | ||||
|             #                user_agent=request_headers['User-Agent'] if request_headers.get('User-Agent') else 'Mozilla/5.0', | ||||
|             #               proxy=self.proxy, | ||||
|             # This is needed to enable JavaScript execution on GitHub and others | ||||
|             bypass_csp=True, | ||||
|             # Should never be needed | ||||
|             accept_downloads=False, | ||||
|             proxy=proxy | ||||
|             accept_downloads=False,  # Should never be needed | ||||
|             bypass_csp=True,  # This is needed to enable JavaScript execution on GitHub and others | ||||
|             extra_http_headers=self.headers, | ||||
|             ignore_https_errors=True, | ||||
|             proxy=proxy, | ||||
|             service_workers=os.getenv('PLAYWRIGHT_SERVICE_WORKERS', 'allow'), | ||||
|             # Should be `allow` or `block` - sites like YouTube can transmit large amounts of data via Service Workers | ||||
|             user_agent=manage_user_agent(headers=self.headers), | ||||
|         ) | ||||
|  | ||||
|  | ||||
|         self.page = self.context.new_page() | ||||
|  | ||||
|         # self.page.set_default_navigation_timeout(keep_open) | ||||
| @@ -224,11 +233,11 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|         # Listen for all console events and handle errors | ||||
|         self.page.on("console", lambda msg: print(f"Browser steps console - {msg.type}: {msg.text} {msg.args}")) | ||||
|  | ||||
|         print("Time to browser setup", time.time() - now) | ||||
|         logger.debug(f"Time to browser setup {time.time()-now:.2f}s") | ||||
|         self.page.wait_for_timeout(1 * 1000) | ||||
|  | ||||
|     def mark_as_closed(self): | ||||
|         print("Page closed, cleaning up..") | ||||
|         logger.debug("Page closed, cleaning up..") | ||||
|  | ||||
|     @property | ||||
|     def has_expired(self): | ||||
| @@ -239,7 +248,7 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|     def get_current_state(self): | ||||
|         """Return the screenshot and interactive elements mapping, generally always called after action_()""" | ||||
|         from pkg_resources import resource_string | ||||
|         xpath_element_js = resource_string(__name__, "../../res/xpath_element_scraper.js").decode('utf-8') | ||||
|         xpath_element_js = resource_string(__name__, "../../content_fetchers/res/xpath_element_scraper.js").decode('utf-8') | ||||
|         now = time.time() | ||||
|         self.page.wait_for_timeout(1 * 1000) | ||||
|  | ||||
| @@ -254,7 +263,7 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|         xpath_data = self.page.evaluate("async () => {" + xpath_element_js + "}") | ||||
|         # So the JS will find the smallest one first | ||||
|         xpath_data['size_pos'] = sorted(xpath_data['size_pos'], key=lambda k: k['width'] * k['height'], reverse=True) | ||||
|         print("Time to complete get_current_state of browser", time.time() - now) | ||||
|         logger.debug(f"Time to complete get_current_state of browser {time.time()-now:.2f}s") | ||||
|         # except | ||||
|         # playwright._impl._api_types.Error: Browser closed. | ||||
|         # @todo show some countdown timer? | ||||
| @@ -274,10 +283,10 @@ class browsersteps_live_ui(steppable_browser_interface): | ||||
|         self.page.evaluate("var include_filters=''") | ||||
|         from pkg_resources import resource_string | ||||
|         # The code that scrapes elements and makes a list of elements/size/position to click on in the VisualSelector | ||||
|         xpath_element_js = resource_string(__name__, "../../res/xpath_element_scraper.js").decode('utf-8') | ||||
|         from changedetectionio.content_fetcher import visualselector_xpath_selectors | ||||
|         xpath_element_js = resource_string(__name__, "../../content_fetchers/res/xpath_element_scraper.js").decode('utf-8') | ||||
|         from changedetectionio.content_fetchers import visualselector_xpath_selectors | ||||
|         xpath_element_js = xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) | ||||
|         xpath_data = self.page.evaluate("async () => {" + xpath_element_js + "}") | ||||
|         screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72))) | ||||
|         screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=int(os.getenv("SCREENSHOT_QUALITY", 72))) | ||||
|  | ||||
|         return (screenshot, xpath_data) | ||||
|   | ||||
| @@ -1,5 +1,4 @@ | ||||
| from playwright.sync_api import PlaywrightContextManager | ||||
| import asyncio | ||||
|  | ||||
| # So playwright wants to run as a context manager, but we do something horrible and hacky | ||||
| # we are holding the session open for as long as possible, then shutting it down, and opening a new one | ||||
|   | ||||
| @@ -1,14 +1,11 @@ | ||||
| from concurrent.futures import ThreadPoolExecutor | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
|  | ||||
| from functools import wraps | ||||
|  | ||||
| from flask import Blueprint | ||||
| from flask_login import login_required | ||||
|  | ||||
| from changedetectionio.processors import text_json_diff | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
|  | ||||
|  | ||||
| STATUS_CHECKING = 0 | ||||
| STATUS_FAILED = 1 | ||||
| STATUS_OK = 2 | ||||
| @@ -32,7 +29,8 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|     @threadpool | ||||
|     def long_task(uuid, preferred_proxy): | ||||
|         import time | ||||
|         from changedetectionio import content_fetcher | ||||
|         from changedetectionio.content_fetchers import exceptions as content_fetcher_exceptions | ||||
|         from changedetectionio.processors import text_json_diff | ||||
|  | ||||
|         status = {'status': '', 'length': 0, 'text': ''} | ||||
|         from jinja2 import Environment, BaseLoader | ||||
| @@ -43,7 +41,7 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|             update_handler = text_json_diff.perform_site_check(datastore=datastore, watch_uuid=uuid) | ||||
|             update_handler.call_browser() | ||||
|         # title, size is len contents not len xfer | ||||
|         except content_fetcher.Non200ErrorCodeReceived as e: | ||||
|         except content_fetcher_exceptions.Non200ErrorCodeReceived as e: | ||||
|             if e.status_code == 404: | ||||
|                 status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but 404 (page not found)"}) | ||||
|             elif e.status_code == 403 or e.status_code == 401: | ||||
| @@ -52,12 +50,12 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|                 status.update({'status': 'ERROR', 'length': len(contents), 'text': f"Status code: {e.status_code}"}) | ||||
|         except text_json_diff.FilterNotFoundInResponse: | ||||
|             status.update({'status': 'OK', 'length': len(contents), 'text': f"OK but CSS/xPath filter not found (page changed layout?)"}) | ||||
|         except content_fetcher.EmptyReply as e: | ||||
|         except content_fetcher_exceptions.EmptyReply as e: | ||||
|             if e.status_code == 403 or e.status_code == 401: | ||||
|                 status.update({'status': 'ERROR OTHER', 'length': len(contents), 'text': f"Got empty reply with code {e.status_code} - Access denied"}) | ||||
|             else: | ||||
|                 status.update({'status': 'ERROR OTHER', 'length': len(contents) if contents else 0, 'text': f"Empty reply with code {e.status_code}, needs chrome?"}) | ||||
|         except content_fetcher.ReplyWithContentButNoText as e: | ||||
|         except content_fetcher_exceptions.ReplyWithContentButNoText as e: | ||||
|             txt = f"Got reply but with no content - Status code {e.status_code} - It's possible that the filters were found, but contained no usable text (or contained only an image)." | ||||
|             status.update({'status': 'ERROR', 'text': txt}) | ||||
|         except Exception as e: | ||||
|   | ||||
| @@ -18,8 +18,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q: PriorityQueue | ||||
|     def accept(uuid): | ||||
|         datastore.data['watching'][uuid]['track_ldjson_price_data'] = PRICE_DATA_TRACK_ACCEPT | ||||
|         update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False})) | ||||
|         return redirect(url_for("form_watch_checknow", uuid=uuid)) | ||||
|  | ||||
|         return redirect(url_for("index")) | ||||
|  | ||||
|     @login_required | ||||
|     @price_data_follower_blueprint.route("/<string:uuid>/reject", methods=['GET']) | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| from flask import Blueprint, request, make_response, render_template, flash, url_for, redirect | ||||
| from changedetectionio.store import ChangeDetectionStore | ||||
| from changedetectionio import login_optionally_required | ||||
| from changedetectionio.flask_app import login_optionally_required | ||||
|  | ||||
|  | ||||
| def construct_blueprint(datastore: ChangeDetectionStore): | ||||
| @@ -11,9 +11,10 @@ def construct_blueprint(datastore: ChangeDetectionStore): | ||||
|     def tags_overview_page(): | ||||
|         from .form import SingleTag | ||||
|         add_form = SingleTag(request.form) | ||||
|         sorted_tags = sorted(datastore.data['settings']['application'].get('tags').items(), key=lambda x: x[1]['title']) | ||||
|         output = render_template("groups-overview.html", | ||||
|                                  form=add_form, | ||||
|                                  available_tags=datastore.data['settings']['application'].get('tags', {}), | ||||
|                                  available_tags=sorted_tags, | ||||
|                                  ) | ||||
|  | ||||
|         return output | ||||
|   | ||||
| @@ -3,7 +3,7 @@ | ||||
| {% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %} | ||||
| {% from '_common_fields.jinja' import render_common_settings_form %} | ||||
| <script> | ||||
|     const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}"; | ||||
|     const notification_base_url="{{url_for('ajax_callback_send_notification_test', watch_uuid=uuid)}}"; | ||||
| </script> | ||||
|  | ||||
| <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script> | ||||
|   | ||||
| @@ -40,7 +40,7 @@ | ||||
|                 <td colspan="3">No website organisational tags/groups configured</td> | ||||
|             </tr> | ||||
|             {% endif %} | ||||
|             {% for uuid, tag in available_tags.items()  %} | ||||
|             {% for uuid, tag in available_tags  %} | ||||
|             <tr id="{{ uuid }}" class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }}"> | ||||
|                 <td class="watch-controls"> | ||||
|                     <a class="link-mute state-{{'on' if tag.notification_muted else 'off'}}" href="{{url_for('tags.mute', uuid=tag.uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications" class="icon icon-mute" ></a> | ||||
|   | ||||
| @@ -1,153 +0,0 @@ | ||||
| #!/usr/bin/python3 | ||||
|  | ||||
| # Launch as a eventlet.wsgi server instance. | ||||
|  | ||||
| from distutils.util import strtobool | ||||
| from json.decoder import JSONDecodeError | ||||
|  | ||||
| import eventlet | ||||
| import eventlet.wsgi | ||||
| import getopt | ||||
| import os | ||||
| import signal | ||||
| import socket | ||||
| import sys | ||||
|  | ||||
| from . import store, changedetection_app, content_fetcher | ||||
| from . import __version__ | ||||
|  | ||||
| # Only global so we can access it in the signal handler | ||||
| app = None | ||||
| datastore = None | ||||
|  | ||||
def sigterm_handler(_signo, _stack_frame):
    """SIGTERM handler: flush the in-memory datastore to disk before shutdown."""
    global app, datastore
    print('Shutdown: Got SIGTERM, DB saved to disk')
    # Persist everything; we deliberately do not raise SystemExit here,
    # the WSGI server is left to wind down on its own.
    datastore.sync_to_json()
|  | ||||
def main():
    """Entry point: parse CLI options, build the datastore and Flask app,
    then serve it with eventlet (optionally over SSL and/or IPv6).

    Options: -s SSL, -h host, -p port, -d datastore path, -6 IPv6,
    -c cleanup unused snapshots, -C create the datastore dir if missing.
    """
    global datastore
    global app

    datastore_path = None
    do_cleanup = False
    host = ''
    ipv6_enabled = False
    # Normalise to int up-front: the env var arrives as a string, and it was
    # previously passed unconverted to eventlet.listen() in the SSL branch,
    # which raises a TypeError when PORT is set.
    port = int(os.environ.get('PORT') or 5000)
    ssl_mode = False

    # On Windows, create and use a default path.
    if os.name == 'nt':
        datastore_path = os.path.expandvars(r'%APPDATA%\changedetection.io')
        os.makedirs(datastore_path, exist_ok=True)
    else:
        # Must be absolute so that send_from_directory doesnt try to make it relative to backend/
        datastore_path = os.path.join(os.getcwd(), "../datastore")

    try:
        opts, args = getopt.getopt(sys.argv[1:], "6Ccsd:h:p:", "port")
    except getopt.GetoptError:
        print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path]')
        sys.exit(2)

    create_datastore_dir = False

    for opt, arg in opts:
        if opt == '-s':
            ssl_mode = True

        if opt == '-h':
            host = arg

        if opt == '-p':
            port = int(arg)

        if opt == '-d':
            datastore_path = arg

        if opt == '-6':
            print ("Enabling IPv6 listen support")
            ipv6_enabled = True

        # Cleanup (remove text files that arent in the index)
        if opt == '-c':
            do_cleanup = True

        # Create the datadir if it doesnt exist
        if opt == '-C':
            create_datastore_dir = True

    # isnt there some @thingy to attach to each route to tell it, that this route needs a datastore
    app_config = {'datastore_path': datastore_path}

    if not os.path.isdir(app_config['datastore_path']):
        if create_datastore_dir:
            os.mkdir(app_config['datastore_path'])
        else:
            print(
                "ERROR: Directory path for the datastore '{}' does not exist, cannot start, please make sure the directory exists or specify a directory with the -d option.\n"
                "Or use the -C parameter to create the directory.".format(app_config['datastore_path']), file=sys.stderr)
            sys.exit(2)

    try:
        datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=__version__)
    except JSONDecodeError as e:
        # Dont' start if the JSON DB looks corrupt
        print ("ERROR: JSON DB or Proxy List JSON at '{}' appears to be corrupt, aborting".format(app_config['datastore_path']))
        print(str(e))
        return

    app = changedetection_app(app_config, datastore)

    signal.signal(signal.SIGTERM, sigterm_handler)

    # Go into cleanup mode
    if do_cleanup:
        datastore.remove_unused_snapshots()

    app.config['datastore_path'] = datastore_path


    @app.context_processor
    def inject_version():
        # Make version / update info available to every rendered template
        return dict(right_sticky="v{}".format(datastore.data['version_tag']),
                    new_version_available=app.config['NEW_VERSION_AVAILABLE'],
                    has_password=datastore.data['settings']['application']['password'] != False
                    )

    # Monitored websites will not receive a Referer header when a user clicks on an outgoing link.
    # @Note: Incompatible with password login (and maybe other features) for now, submit a PR!
    @app.after_request
    def hide_referrer(response):
        if strtobool(os.getenv("HIDE_REFERER", 'false')):
            response.headers["Referrer-Policy"] = "no-referrer"

        return response

    # Proxy sub-directory support
    # Set environment var USE_X_SETTINGS=1 on this script
    # And then in your proxy_pass settings
    #
    #         proxy_set_header Host "localhost";
    #         proxy_set_header X-Forwarded-Prefix /app;

    if os.getenv('USE_X_SETTINGS'):
        print ("USE_X_SETTINGS is ENABLED\n")
        from werkzeug.middleware.proxy_fix import ProxyFix
        app.wsgi_app = ProxyFix(app.wsgi_app, x_prefix=1, x_host=1)

    s_type = socket.AF_INET6 if ipv6_enabled else socket.AF_INET

    if ssl_mode:
        # @todo finalise SSL config, but this should get you in the right direction if you need it.
        eventlet.wsgi.server(eventlet.wrap_ssl(eventlet.listen((host, port), s_type),
                                               certfile='cert.pem',
                                               keyfile='privkey.pem',
                                               server_side=True), app)

    else:
        eventlet.wsgi.server(eventlet.listen((host, port), s_type), app)
|  | ||||
| @@ -1,757 +0,0 @@ | ||||
| from abc import abstractmethod | ||||
| from distutils.util import strtobool | ||||
| from urllib.parse import urlparse | ||||
| import chardet | ||||
| import hashlib | ||||
| import json | ||||
| import logging | ||||
| import os | ||||
| import requests | ||||
| import sys | ||||
| import time | ||||
| import urllib.parse | ||||
|  | ||||
| visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4, header, footer, section, article, aside, details, main, nav, section, summary' | ||||
|  | ||||
|  | ||||
class Non200ErrorCodeReceived(Exception):
    """Raised when the fetch completed but the server answered with a non-200 status code."""

    def __init__(self, status_code, url, screenshot=None, xpath_data=None, page_html=None):
        # Keep the details around so other parts of the app can report them
        self.status_code = status_code
        self.url = url
        self.screenshot = screenshot
        self.xpath_data = xpath_data
        # Plain-text rendering of the page body, only when HTML was captured
        self.page_text = None
        if page_html:
            from changedetectionio import html_tools
            self.page_text = html_tools.html_to_text(page_html)
|  | ||||
|  | ||||
class checksumFromPreviousCheckWasTheSame(Exception):
    """Raised when the fetched content checksum matches the previous check (no change)."""

    def __init__(self):
        # No extra state to carry; this exists purely as a distinct signal type.
        pass
|  | ||||
|  | ||||
class JSActionExceptions(Exception):
    """Raised when executing user-supplied JavaScript in the page failed."""

    def __init__(self, status_code, url, screenshot, message=''):
        # Stash everything so callers can build a useful error report
        self.status_code = status_code
        self.url = url
        self.screenshot = screenshot
        self.message = message
|  | ||||
|  | ||||
class BrowserStepsStepTimout(Exception):
    """Raised when a single browser-steps step exceeded its allowed time."""

    def __init__(self, step_n):
        # 1-based index of the step that timed out
        self.step_n = step_n
|  | ||||
|  | ||||
class PageUnloadable(Exception):
    """Raised when the target page could not be loaded at all."""

    def __init__(self, status_code, url, message, screenshot=False):
        # Set this so we can use it in other parts of the app
        self.status_code = status_code
        self.url = url
        self.screenshot = screenshot
        self.message = message
|  | ||||
|  | ||||
class EmptyReply(Exception):
    """Raised when the server replied with no content at all."""

    def __init__(self, status_code, url, screenshot=None):
        # Set this so we can use it in other parts of the app
        self.status_code = status_code
        self.url = url
        self.screenshot = screenshot
|  | ||||
|  | ||||
class ScreenshotUnavailable(Exception):
    """Raised when a screenshot was expected but could not be produced.

    Carries the HTTP status code, URL, and (when page HTML was available)
    a plain-text rendering of the page for error reporting.
    """

    def __init__(self, status_code, url, page_html=None):
        # Set this so we can use it in other parts of the app
        self.status_code = status_code
        self.url = url
        # Always define page_text so readers don't hit AttributeError
        self.page_text = None
        if page_html:
            # Fix: was `from html_tools import html_to_text`, which is not an
            # importable top-level module — use the package path exactly like
            # the sibling Non200ErrorCodeReceived exception does.
            from changedetectionio import html_tools
            self.page_text = html_tools.html_to_text(page_html)
|  | ||||
|  | ||||
class ReplyWithContentButNoText(Exception):
    """Raised when the reply had content, but no usable text could be extracted."""

    def __init__(self, status_code, url, screenshot=None, has_filters=False, html_content=''):
        # Set this so we can use it in other parts of the app
        self.status_code = status_code
        self.url = url
        self.screenshot = screenshot
        # True when CSS/xPath filters were applied but matched no text
        self.has_filters = has_filters
        # Raw HTML kept so the caller can decide how to report the problem
        self.html_content = html_content
|  | ||||
|  | ||||
class Fetcher():
    """Abstract base class for page fetchers.

    Concrete subclasses (e.g. the playwright-based fetcher below) implement
    run() to retrieve a URL and populate `content`, `status_code`, `headers`,
    plus optional visual-selector data (`screenshot`, `xpath_data`).
    """
    # "Browser steps" config (list of operations) executed before the scrape
    browser_steps = None
    # Directory where per-step screenshots/HTML are written, or None to skip
    browser_steps_screenshot_path = None
    # Fetched page body, set by run()
    content = None
    # Error description, set by run() on failure
    error = None
    fetcher_description = "No description"
    browser_connection_url = None
    # NOTE(review): class-level mutable dict — shared across instances unless
    # reassigned; confirm every subclass replaces rather than mutates it
    headers = {}
    status_code = None
    # Optional user-supplied JS executed in the page before scraping
    webdriver_js_execute_code = None
    xpath_data = None
    xpath_element_js = ""
    instock_data = None
    instock_data_js = ""

    # Will be needed in the future by the VisualSelector, always get this where possible.
    screenshot = False
    system_http_proxy = os.getenv('HTTP_PROXY')
    system_https_proxy = os.getenv('HTTPS_PROXY')

    # Time ONTOP of the system defined env minimum time
    render_extract_delay = 0

    def __init__(self):
        """Load the bundled JS snippets used for element scraping / stock detection."""
        from pkg_resources import resource_string
        # The code that scrapes elements and makes a list of elements/size/position to click on in the VisualSelector
        self.xpath_element_js = resource_string(__name__, "res/xpath_element_scraper.js").decode('utf-8')
        self.instock_data_js = resource_string(__name__, "res/stock-not-in-stock.js").decode('utf-8')

    @abstractmethod
    def get_error(self):
        """Return the last error description (or None)."""
        return self.error

    @abstractmethod
    def run(self,
            url,
            timeout,
            request_headers,
            request_body,
            request_method,
            ignore_status_codes=False,
            current_include_filters=None,
            is_binary=False):
        # Should set self.error, self.status_code and self.content
        pass

    @abstractmethod
    def quit(self):
        """Tear down the fetcher (close browser/driver); default is a no-op."""
        return

    @abstractmethod
    def get_last_status_code(self):
        """Return the HTTP status code of the last fetch."""
        return self.status_code

    @abstractmethod
    def screenshot_step(self, step_n):
        """Capture a screenshot for browser-steps step `step_n`; default is a no-op."""
        return None

    @abstractmethod
    # Return true/false if this checker is ready to run, in the case it needs todo some special config check etc
    def is_ready(self):
        return True

    def get_all_headers(self):
        """
        Get all headers but ensure all keys are lowercase
        :return:
        """
        return {k.lower(): v for k, v in self.headers.items()}

    def browser_steps_get_valid_steps(self):
        """Return an iterator of actionable steps, or None when no steps are set.

        Filters out placeholder entries ('Choose one') and the implicit
        'Goto site' step, which the fetcher performs itself.
        """
        if self.browser_steps is not None and len(self.browser_steps):
            valid_steps = filter(
                lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
                self.browser_steps)

            return valid_steps

        return None

    def iterate_browser_steps(self):
        """Execute each configured browser step against self.page in order.

        Step selector/value fields may contain jinja2 templates (with the
        `jinja2_time` extension for dates), rendered per run. Screenshots and
        HTML are saved before and after each step. A playwright timeout
        aborts the whole sequence via BrowserStepsStepTimout.
        """
        from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface
        from playwright._impl._api_types import TimeoutError
        from jinja2 import Environment
        jinja2_env = Environment(extensions=['jinja2_time.TimeExtension'])

        step_n = 0

        if self.browser_steps is not None and len(self.browser_steps):
            interface = steppable_browser_interface()
            interface.page = self.page
            valid_steps = self.browser_steps_get_valid_steps()

            for step in valid_steps:
                step_n += 1
                print(">> Iterating check - browser Step n {} - {}...".format(step_n, step['operation']))
                self.screenshot_step("before-" + str(step_n))
                self.save_step_html("before-" + str(step_n))
                try:
                    optional_value = step['optional_value']
                    selector = step['selector']
                    # Support for jinja2 template in step values, with date module added
                    if '{%' in step['optional_value'] or '{{' in step['optional_value']:
                        optional_value = str(jinja2_env.from_string(step['optional_value']).render())
                    if '{%' in step['selector'] or '{{' in step['selector']:
                        selector = str(jinja2_env.from_string(step['selector']).render())

                    getattr(interface, "call_action")(action_name=step['operation'],
                                                      selector=selector,
                                                      optional_value=optional_value)
                    self.screenshot_step(step_n)
                    self.save_step_html(step_n)
                except TimeoutError as e:
                    print(str(e))
                    # Stop processing here
                    raise BrowserStepsStepTimout(step_n=step_n)

    # It's always good to reset these
    def delete_browser_steps_screenshots(self):
        """Remove leftover step_*.jpeg screenshots from a previous run."""
        import glob
        if self.browser_steps_screenshot_path is not None:
            dest = os.path.join(self.browser_steps_screenshot_path, 'step_*.jpeg')
            files = glob.glob(dest)
            for f in files:
                if os.path.isfile(f):
                    os.unlink(f)
|  | ||||
|  | ||||
| #   Maybe for the future, each fetcher provides its own diff output, could be used for text, image | ||||
| #   the current one would return javascript output (as we use JS to generate the diff) | ||||
| # | ||||
def available_fetchers():
    """Return [(class_name, description), ...] for every fetcher class in this module.

    Fetcher classes are identified by the 'html_' name prefix.
    @todo html_ is maybe better as fetcher_ or something
    In this case, make sure to edit the default one in store.py and fetch_site_status.py
    """
    # See the if statement at the bottom of this file for how we switch between playwright and webdriver
    import inspect
    return [
        (name, obj.fetcher_description)
        for name, obj in inspect.getmembers(sys.modules[__name__], inspect.isclass)
        if name.startswith('html_')
    ]
|  | ||||
|  | ||||
| class base_html_playwright(Fetcher): | ||||
|     fetcher_description = "Playwright {}/Javascript".format( | ||||
|         os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').capitalize() | ||||
|     ) | ||||
|     if os.getenv("PLAYWRIGHT_DRIVER_URL"): | ||||
|         fetcher_description += " via '{}'".format(os.getenv("PLAYWRIGHT_DRIVER_URL")) | ||||
|  | ||||
|     browser_type = '' | ||||
|     command_executor = '' | ||||
|  | ||||
|     # Configs for Proxy setup | ||||
|     # In the ENV vars, is prefixed with "playwright_proxy_", so it is for example "playwright_proxy_server" | ||||
|     playwright_proxy_settings_mappings = ['bypass', 'server', 'username', 'password'] | ||||
|  | ||||
|     proxy = None | ||||
|  | ||||
|     def __init__(self, proxy_override=None, browser_connection_url=None): | ||||
|         super().__init__() | ||||
|  | ||||
|         self.browser_type = os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').strip('"') | ||||
|  | ||||
|         # .strip('"') is going to save someone a lot of time when they accidently wrap the env value | ||||
|         if not browser_connection_url: | ||||
|             self.browser_connection_url = os.getenv("PLAYWRIGHT_DRIVER_URL", 'ws://playwright-chrome:3000').strip('"') | ||||
|         else: | ||||
|             self.browser_connection_url = browser_connection_url | ||||
|  | ||||
|         # If any proxy settings are enabled, then we should setup the proxy object | ||||
|         proxy_args = {} | ||||
|         for k in self.playwright_proxy_settings_mappings: | ||||
|             v = os.getenv('playwright_proxy_' + k, False) | ||||
|             if v: | ||||
|                 proxy_args[k] = v.strip('"') | ||||
|  | ||||
|         if proxy_args: | ||||
|             self.proxy = proxy_args | ||||
|  | ||||
|         # allow per-watch proxy selection override | ||||
|         if proxy_override: | ||||
|             self.proxy = {'server': proxy_override} | ||||
|  | ||||
|         if self.proxy: | ||||
|             # Playwright needs separate username and password values | ||||
|             parsed = urlparse(self.proxy.get('server')) | ||||
|             if parsed.username: | ||||
|                 self.proxy['username'] = parsed.username | ||||
|                 self.proxy['password'] = parsed.password | ||||
|  | ||||
|     def screenshot_step(self, step_n=''): | ||||
|         screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=85) | ||||
|  | ||||
|         if self.browser_steps_screenshot_path is not None: | ||||
|             destination = os.path.join(self.browser_steps_screenshot_path, 'step_{}.jpeg'.format(step_n)) | ||||
|             logging.debug("Saving step screenshot to {}".format(destination)) | ||||
|             with open(destination, 'wb') as f: | ||||
|                 f.write(screenshot) | ||||
|  | ||||
|     def save_step_html(self, step_n): | ||||
|         content = self.page.content() | ||||
|         destination = os.path.join(self.browser_steps_screenshot_path, 'step_{}.html'.format(step_n)) | ||||
|         logging.debug("Saving step HTML to {}".format(destination)) | ||||
|         with open(destination, 'w') as f: | ||||
|             f.write(content) | ||||
|  | ||||
    def run_fetch_browserless_puppeteer(self,
            url,
            timeout,
            request_headers,
            request_body,
            request_method,
            ignore_status_codes=False,
            current_include_filters=None,
            is_binary=False):
        """Fetch the page via a browserless `/function` HTTP endpoint running a bundled Puppeteer script.

        Splices the element-scraper and stock-detection JS into res/puppeteer_fetch.js,
        POSTs it to browserless, then unpacks the JSON reply into self.content,
        self.headers, self.instock_data, self.screenshot, self.status_code and
        self.xpath_data.

        Raises PageUnloadable / ScreenshotUnavailable / EmptyReply /
        Non200ErrorCodeReceived depending on what came back.
        NOTE(review): timeout, request_body and request_method are currently unused here.
        """

        from pkg_resources import resource_string

        # Milliseconds the JS side should wait on top of page load before scraping
        extra_wait_ms = (int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay) * 1000

        self.xpath_element_js = self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors)
        code = resource_string(__name__, "res/puppeteer_fetch.js").decode('utf-8')
        # In the future inject this is a proper JS package
        code = code.replace('%xpath_scrape_code%', self.xpath_element_js)
        code = code.replace('%instock_scrape_code%', self.instock_data_js)

        from requests.exceptions import ConnectTimeout, ReadTimeout
        wait_browserless_seconds = 240

        browserless_function_url = os.getenv('BROWSERLESS_FUNCTION_URL')
        from urllib.parse import urlparse
        if not browserless_function_url:
            # Convert/try to guess from PLAYWRIGHT_DRIVER_URL
            o = urlparse(os.getenv('PLAYWRIGHT_DRIVER_URL'))
            browserless_function_url = o._replace(scheme="http")._replace(path="function").geturl()


        # Append proxy connect string
        if self.proxy:
            # Remove username/password if it exists in the URL or you will receive "ERR_NO_SUPPORTED_PROXIES" error
            # Actual authentication handled by Puppeteer/node
            o = urlparse(self.proxy.get('server'))
            proxy_url = urllib.parse.quote(o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl())
            # NOTE(review): joins with '&' even when the URL has no '?' yet - confirm the
            # browserless API tolerates this before changing it
            browserless_function_url = f"{browserless_function_url}&--proxy-server={proxy_url}"

        try:
            amp = '&' if '?' in browserless_function_url else '?'
            response = requests.request(
                method="POST",
                json={
                    "code": code,
                    "context": {
                        # Very primitive disk cache - USE WITH EXTREME CAUTION
                        # Run browserless container  with -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]"
                        'disk_cache_dir': os.getenv("PUPPETEER_DISK_CACHE", False), # or path to disk cache ending in /, ie /tmp/cache/
                        'execute_js': self.webdriver_js_execute_code,
                        'extra_wait_ms': extra_wait_ms,
                        'include_filters': current_include_filters,
                        'req_headers': request_headers,
                        'screenshot_quality': int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)),
                        'url': url,
                        'user_agent': {k.lower(): v for k, v in request_headers.items()}.get('user-agent', None),
                        'proxy_username': self.proxy.get('username', '') if self.proxy else False,
                        'proxy_password': self.proxy.get('password', '') if self.proxy and self.proxy.get('username') else False,
                        'no_cache_list': [
                            'twitter',
                            '.pdf'
                        ],
                        # Could use https://github.com/easylist/easylist here, or install a plugin
                        'block_url_list': [
                            'adnxs.com',
                            'analytics.twitter.com',
                            'doubleclick.net',
                            'google-analytics.com',
                            'googletagmanager',
                            'trustpilot.com'
                        ]
                    }
                },
                # @todo /function needs adding ws:// to http:// rebuild this
                url=browserless_function_url+f"{amp}--disable-features=AudioServiceOutOfProcess&dumpio=true&--disable-remote-fonts",
                timeout=wait_browserless_seconds)

        except ReadTimeout:
            raise PageUnloadable(url=url, status_code=None, message=f"No response from browserless in {wait_browserless_seconds}s")
        except ConnectTimeout:
            raise PageUnloadable(url=url, status_code=None, message=f"Timed out connecting to browserless, retrying..")
        else:
            # 200 Here means that the communication to browserless worked only, not the page state
            if response.status_code == 200:
                import base64

                x = response.json()
                if not x.get('screenshot'):
                    # https://github.com/puppeteer/puppeteer/blob/v1.0.0/docs/troubleshooting.md#tips
                    # https://github.com/puppeteer/puppeteer/issues/1834
                    # https://github.com/puppeteer/puppeteer/issues/1834#issuecomment-381047051
                    # Check your memory is shared and big enough
                    raise ScreenshotUnavailable(url=url, status_code=None)

                if not x.get('content', '').strip():
                    raise EmptyReply(url=url, status_code=None)

                if x.get('status_code', 200) != 200 and not ignore_status_codes:
                    raise Non200ErrorCodeReceived(url=url, status_code=x.get('status_code', 200), page_html=x['content'])

                self.content = x.get('content')
                self.headers = x.get('headers')
                self.instock_data = x.get('instock_data')
                self.screenshot = base64.b64decode(x.get('screenshot'))
                self.status_code = x.get('status_code')
                self.xpath_data = x.get('xpath_data')

            else:
                # Some other error from browserless
                raise PageUnloadable(url=url, status_code=None, message=response.content.decode('utf-8'))
|  | ||||
|     def run(self, | ||||
|             url, | ||||
|             timeout, | ||||
|             request_headers, | ||||
|             request_body, | ||||
|             request_method, | ||||
|             ignore_status_codes=False, | ||||
|             current_include_filters=None, | ||||
|             is_binary=False): | ||||
|  | ||||
|         # For now, USE_EXPERIMENTAL_PUPPETEER_FETCH is not supported by watches with BrowserSteps (for now!) | ||||
|         if not self.browser_steps and os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH'): | ||||
|             if strtobool(os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH')): | ||||
|                 # Temporary backup solution until we rewrite the playwright code | ||||
|                 return self.run_fetch_browserless_puppeteer( | ||||
|                     url, | ||||
|                     timeout, | ||||
|                     request_headers, | ||||
|                     request_body, | ||||
|                     request_method, | ||||
|                     ignore_status_codes, | ||||
|                     current_include_filters, | ||||
|                     is_binary) | ||||
|  | ||||
|         from playwright.sync_api import sync_playwright | ||||
|         import playwright._impl._api_types | ||||
|  | ||||
|         self.delete_browser_steps_screenshots() | ||||
|         response = None | ||||
|  | ||||
|         with sync_playwright() as p: | ||||
|             browser_type = getattr(p, self.browser_type) | ||||
|  | ||||
|             # Seemed to cause a connection Exception even tho I can see it connect | ||||
|             # self.browser = browser_type.connect(self.command_executor, timeout=timeout*1000) | ||||
|             # 60,000 connection timeout only | ||||
|             browser = browser_type.connect_over_cdp(self.browser_connection_url, timeout=60000) | ||||
|  | ||||
|             # SOCKS5 with authentication is not supported (yet) | ||||
|             # https://github.com/microsoft/playwright/issues/10567 | ||||
|  | ||||
|             # Set user agent to prevent Cloudflare from blocking the browser | ||||
|             # Use the default one configured in the App.py model that's passed from fetch_site_status.py | ||||
|             context = browser.new_context( | ||||
|                 user_agent={k.lower(): v for k, v in request_headers.items()}.get('user-agent', None), | ||||
|                 proxy=self.proxy, | ||||
|                 # This is needed to enable JavaScript execution on GitHub and others | ||||
|                 bypass_csp=True, | ||||
|                 # Should be `allow` or `block` - sites like YouTube can transmit large amounts of data via Service Workers | ||||
|                 service_workers=os.getenv('PLAYWRIGHT_SERVICE_WORKERS', 'allow'), | ||||
|                 # Should never be needed | ||||
|                 accept_downloads=False | ||||
|             ) | ||||
|  | ||||
|             self.page = context.new_page() | ||||
|             if len(request_headers): | ||||
|                 context.set_extra_http_headers(request_headers) | ||||
|  | ||||
|             # Listen for all console events and handle errors | ||||
|             self.page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}")) | ||||
|  | ||||
|             # Re-use as much code from browser steps as possible so its the same | ||||
|             from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface | ||||
|             browsersteps_interface = steppable_browser_interface() | ||||
|             browsersteps_interface.page = self.page | ||||
|  | ||||
|             response = browsersteps_interface.action_goto_url(value=url) | ||||
|             self.headers = response.all_headers() | ||||
|  | ||||
|             if response is None: | ||||
|                 context.close() | ||||
|                 browser.close() | ||||
|                 print("Content Fetcher > Response object was none") | ||||
|                 raise EmptyReply(url=url, status_code=None) | ||||
|  | ||||
|             try: | ||||
|                 if self.webdriver_js_execute_code is not None and len(self.webdriver_js_execute_code): | ||||
|                     browsersteps_interface.action_execute_js(value=self.webdriver_js_execute_code, selector=None) | ||||
|             except playwright._impl._api_types.TimeoutError as e: | ||||
|                 context.close() | ||||
|                 browser.close() | ||||
|                 # This can be ok, we will try to grab what we could retrieve | ||||
|                 pass | ||||
|             except Exception as e: | ||||
|                 print("Content Fetcher > Other exception when executing custom JS code", str(e)) | ||||
|                 context.close() | ||||
|                 browser.close() | ||||
|                 raise PageUnloadable(url=url, status_code=None, message=str(e)) | ||||
|  | ||||
|             extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay | ||||
|             self.page.wait_for_timeout(extra_wait * 1000) | ||||
|  | ||||
|  | ||||
|             self.status_code = response.status | ||||
|  | ||||
|             if self.status_code != 200 and not ignore_status_codes: | ||||
|  | ||||
|                 screenshot=self.page.screenshot(type='jpeg', full_page=True, | ||||
|                                      quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72))) | ||||
|  | ||||
|                 raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, screenshot=screenshot) | ||||
|  | ||||
|             if len(self.page.content().strip()) == 0: | ||||
|                 context.close() | ||||
|                 browser.close() | ||||
|                 print("Content Fetcher > Content was empty") | ||||
|                 raise EmptyReply(url=url, status_code=response.status) | ||||
|  | ||||
|             # Run Browser Steps here | ||||
|             if self.browser_steps_get_valid_steps(): | ||||
|                 self.iterate_browser_steps() | ||||
|                  | ||||
|             self.page.wait_for_timeout(extra_wait * 1000) | ||||
|  | ||||
|             # So we can find an element on the page where its selector was entered manually (maybe not xPath etc) | ||||
|             if current_include_filters is not None: | ||||
|                 self.page.evaluate("var include_filters={}".format(json.dumps(current_include_filters))) | ||||
|             else: | ||||
|                 self.page.evaluate("var include_filters=''") | ||||
|  | ||||
|             self.xpath_data = self.page.evaluate( | ||||
|                 "async () => {" + self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) + "}") | ||||
|             self.instock_data = self.page.evaluate("async () => {" + self.instock_data_js + "}") | ||||
|  | ||||
|             self.content = self.page.content() | ||||
|             # Bug 3 in Playwright screenshot handling | ||||
|             # Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it | ||||
|             # JPEG is better here because the screenshots can be very very large | ||||
|  | ||||
|             # Screenshots also travel via the ws:// (websocket) meaning that the binary data is base64 encoded | ||||
|             # which will significantly increase the IO size between the server and client, it's recommended to use the lowest | ||||
|             # acceptable screenshot quality here | ||||
|             try: | ||||
|                 # The actual screenshot | ||||
|                 self.screenshot = self.page.screenshot(type='jpeg', full_page=True, | ||||
|                                                        quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72))) | ||||
|             except Exception as e: | ||||
|                 context.close() | ||||
|                 browser.close() | ||||
|                 raise ScreenshotUnavailable(url=url, status_code=response.status_code) | ||||
|  | ||||
|             context.close() | ||||
|             browser.close() | ||||
|  | ||||
|  | ||||
class base_html_webdriver(Fetcher):
    """Selenium/WebDriver based fetcher talking to a remote Chrome via WEBDRIVER_URL."""

    if os.getenv("WEBDRIVER_URL"):
        fetcher_description = "WebDriver Chrome/Javascript via '{}'".format(os.getenv("WEBDRIVER_URL"))
    else:
        fetcher_description = "WebDriver Chrome/Javascript"

    # Configs for Proxy setup
    # In the ENV vars, is prefixed with "webdriver_", so it is for example "webdriver_sslProxy"
    selenium_proxy_settings_mappings = ['proxyType', 'ftpProxy', 'httpProxy', 'noProxy',
                                        'proxyAutoconfigUrl', 'sslProxy', 'autodetect',
                                        'socksProxy', 'socksVersion', 'socksUsername', 'socksPassword']
    proxy = None

    def __init__(self, proxy_override=None, browser_connection_url=None):
        """Resolve the WebDriver endpoint and build the Selenium Proxy object from env vars.

        :param proxy_override: per-watch proxy URL that wins over env settings
        :param browser_connection_url: explicit WebDriver hub URL (else WEBDRIVER_URL env)
        """
        super().__init__()
        from selenium.webdriver.common.proxy import Proxy as SeleniumProxy

        # .strip('"') is going to save someone a lot of time when they accidently wrap the env value
        if not browser_connection_url:
            self.browser_connection_url = os.getenv("WEBDRIVER_URL", 'http://browser-chrome:4444/wd/hub').strip('"')
        else:
            self.browser_connection_url = browser_connection_url

        # If any proxy settings are enabled, then we should setup the proxy object
        proxy_args = {}
        for k in self.selenium_proxy_settings_mappings:
            v = os.getenv('webdriver_' + k, False)
            if v:
                proxy_args[k] = v.strip('"')

        # Map back standard HTTP_ and HTTPS_PROXY to webDriver httpProxy/sslProxy.
        # BUGFIX: proxy_args keys are stored WITHOUT the 'webdriver_' env prefix, so the old
        # checks on 'webdriver_httpProxy'/'webdriver_sslProxy' never matched and the system
        # proxy silently clobbered any webdriver_* env setting. Also, Selenium's Proxy class
        # understands 'sslProxy' (not 'httpsProxy') for HTTPS traffic.
        if not proxy_args.get('httpProxy') and self.system_http_proxy:
            proxy_args['httpProxy'] = self.system_http_proxy
        if not proxy_args.get('sslProxy') and self.system_https_proxy:
            proxy_args['sslProxy'] = self.system_https_proxy

        # Allows override the proxy on a per-request basis
        if proxy_override is not None:
            proxy_args['httpProxy'] = proxy_override

        if proxy_args:
            self.proxy = SeleniumProxy(raw=proxy_args)

    def run(self,
            url,
            timeout,
            request_headers,
            request_body,
            request_method,
            ignore_status_codes=False,
            current_include_filters=None,
            is_binary=False):
        """Load `url` in remote Chrome and record page source + screenshot on self."""

        from selenium import webdriver
        from selenium.webdriver.chrome.options import Options as ChromeOptions
        from selenium.common.exceptions import WebDriverException
        # request_body, request_method unused for now, until some magic in the future happens.

        options = ChromeOptions()
        if self.proxy:
            options.proxy = self.proxy

        self.driver = webdriver.Remote(
            command_executor=self.browser_connection_url,
            options=options)

        try:
            self.driver.get(url)
        except WebDriverException as e:
            # Be sure we close the session window
            self.quit()
            raise

        self.driver.set_window_size(1280, 1024)
        self.driver.implicitly_wait(int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)))

        if self.webdriver_js_execute_code is not None:
            self.driver.execute_script(self.webdriver_js_execute_code)
            # Selenium doesn't automatically wait for actions as good as Playwright, so wait again
            self.driver.implicitly_wait(int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)))

        # @todo - how to check this? is it possible?
        self.status_code = 200
        # @todo somehow we should try to get this working for WebDriver
        # raise EmptyReply(url=url, status_code=r.status_code)

        # @todo - dom wait loaded?
        time.sleep(int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay)
        self.content = self.driver.page_source
        self.headers = {}

        self.screenshot = self.driver.get_screenshot_as_png()

    # Does the connection to the webdriver work? run a test connection.
    def is_ready(self):
        """Open and immediately close a WebDriver session to verify connectivity."""
        from selenium import webdriver
        from selenium.webdriver.chrome.options import Options as ChromeOptions

        # BUGFIX: self.command_executor does not exist on this class - use the
        # resolved browser_connection_url (previously raised AttributeError).
        self.driver = webdriver.Remote(
            command_executor=self.browser_connection_url,
            options=ChromeOptions())

        # driver.quit() seems to cause better exceptions
        self.quit()
        return True

    def quit(self):
        """Best-effort shutdown of the WebDriver session; never raises."""
        if self.driver:
            try:
                self.driver.quit()
            except Exception as e:
                print("Content Fetcher > Exception in chrome shutdown/quit" + str(e))
|  | ||||
|  | ||||
# "html_requests" is listed as the default fetcher in store.py!
class html_requests(Fetcher):
    """Plain `requests`-based fetcher - no browser, no JavaScript."""

    fetcher_description = "Basic fast Plaintext/HTTP Client"

    def __init__(self, proxy_override=None, browser_connection_url=None):
        super().__init__()
        self.proxy_override = proxy_override
        # browser_connection_url is none because its always 'launched locally'

    def run(self,
            url,
            timeout,
            request_headers,
            request_body,
            request_method,
            ignore_status_codes=False,
            current_include_filters=None,
            is_binary=False):
        """Perform the HTTP request and record content/headers/status on self."""

        # Make requests use a more modern looking user-agent
        lowercased_headers = {name.lower(): value for name, value in request_headers.items()}
        if not lowercased_headers.get('user-agent'):
            request_headers['User-Agent'] = os.getenv("DEFAULT_SETTINGS_HEADERS_USERAGENT",
                                                      'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36')

        # Allows override the proxy on a per-request basis; otherwise fall back
        # to the system-wide HTTP(S) proxy settings.
        # https://requests.readthedocs.io/en/latest/user/advanced/#socks
        # Should also work with `socks5://user:pass@host:port` type syntax.
        if self.proxy_override:
            proxies = {'http': self.proxy_override, 'https': self.proxy_override, 'ftp': self.proxy_override}
        else:
            proxies = {}
            if self.system_http_proxy:
                proxies['http'] = self.system_http_proxy
            if self.system_https_proxy:
                proxies['https'] = self.system_https_proxy

        resp = requests.request(method=request_method,
                                data=request_body,
                                url=url,
                                headers=request_headers,
                                timeout=timeout,
                                proxies=proxies,
                                verify=False)

        # If the response did not tell us what encoding format to expect, Then use chardet to override what `requests` thinks.
        # For example - some sites don't tell us it's utf-8, but return utf-8 content
        # This seems to not occur when using webdriver/selenium, it seems to detect the text encoding more reliably.
        # https://github.com/psf/requests/issues/1604 good info about requests encoding detection
        if not is_binary:
            # Don't run this for PDF (and requests identified as binary) takes a _long_ time
            content_type = resp.headers.get('content-type')
            if not content_type or 'charset=' not in content_type:
                detected_encoding = chardet.detect(resp.content)['encoding']
                if detected_encoding:
                    resp.encoding = detected_encoding

        if not resp.content or not len(resp.content):
            raise EmptyReply(url=url, status_code=resp.status_code)

        # @todo test this
        # @todo maybe you really want to test zero-byte return pages?
        if resp.status_code != 200 and not ignore_status_codes:
            # maybe check with content works?
            raise Non200ErrorCodeReceived(url=url, status_code=resp.status_code, page_html=resp.text)

        self.status_code = resp.status_code
        # Binary files just return their checksum until we add something smarter
        self.content = hashlib.md5(resp.content).hexdigest() if is_binary else resp.text
        self.headers = resp.headers
        self.raw_content = resp.content
|  | ||||
|  | ||||
# Decide which is the 'real' HTML webdriver, this is more a system wide config
# rather than site-specific.
use_playwright_as_chrome_fetcher = os.getenv('PLAYWRIGHT_DRIVER_URL', False)
# A configured Playwright driver URL selects the Playwright implementation, else fall back to Selenium.
html_webdriver = base_html_playwright if use_playwright_as_chrome_fetcher else base_html_webdriver
							
								
								
									
										43
									
								
								changedetectionio/content_fetchers/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,43 @@ | ||||
| import sys | ||||
| from distutils.util import strtobool | ||||
| from loguru import logger | ||||
| from changedetectionio.content_fetchers.exceptions import BrowserStepsStepException | ||||
| import os | ||||
|  | ||||
| visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4,header,footer,section,article,aside,details,main,nav,section,summary' | ||||
|  | ||||
| # available_fetchers() will scan this implementation looking for anything starting with html_ | ||||
| # this information is used in the form selections | ||||
| from changedetectionio.content_fetchers.requests import fetcher as html_requests | ||||
|  | ||||
def available_fetchers():
    """Return [(class_name, fetcher_description), ...] for every fetcher class in this module.

    Scans the module for classes whose name starts with 'html_'; this list feeds
    the fetcher <select> in the settings/edit forms.
    """
    # See the if statement at the bottom of this file for how we switch between playwright and webdriver
    import inspect
    p = []
    # getmembers(..., inspect.isclass) already filters to classes - the old extra
    # `if inspect.isclass(obj)` re-check was redundant and has been removed.
    for name, obj in inspect.getmembers(sys.modules[__name__], inspect.isclass):
        # @todo html_ is maybe better as fetcher_ or something
        # In this case, make sure to edit the default one in store.py and fetch_site_status.py
        if name.startswith('html_'):
            p.append((name, obj.fetcher_description))

    return p
|  | ||||
|  | ||||
# Decide which is the 'real' HTML webdriver, this is more a system wide config
# rather than site-specific.
# NOTE: html_webdriver is bound once at import time; changing these env vars
# afterwards has no effect without a restart.
use_playwright_as_chrome_fetcher = os.getenv('PLAYWRIGHT_DRIVER_URL', False)
if use_playwright_as_chrome_fetcher:
    # @note - For now, browser steps always uses playwright
    # FAST_PUPPETEER_CHROME_FETCHER selects the direct Python Puppeteer client
    # over the (default) Playwright library.
    if not strtobool(os.getenv('FAST_PUPPETEER_CHROME_FETCHER', 'False')):
        logger.debug('Using Playwright library as fetcher')
        from .playwright import fetcher as html_webdriver
    else:
        logger.debug('Using direct Python Puppeteer library as fetcher')
        from .puppeteer import fetcher as html_webdriver

else:
    logger.debug("Falling back to selenium as fetcher")
    from .webdriver_selenium import fetcher as html_webdriver
|  | ||||
							
								
								
									
										171
									
								
								changedetectionio/content_fetchers/base.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,171 @@ | ||||
| import os | ||||
| from abc import abstractmethod | ||||
| from loguru import logger | ||||
|  | ||||
| from changedetectionio.content_fetchers import BrowserStepsStepException | ||||
|  | ||||
|  | ||||
def manage_user_agent(headers, current_ua=''):
    """
    Basic setting of user-agent

    NOTE!!!!!! The service that does the actual Chrome fetching should handle any anti-robot techniques
    THERE ARE MANY WAYS THAT IT CAN BE DETECTED AS A ROBOT!!
    This does not take care of
    - Scraping of 'navigator' (platform, productSub, vendor, oscpu etc etc) browser object (navigator.appVersion) etc
    - TCP/IP fingerprint JA3 etc
    - Graphic rendering fingerprinting
    - Your IP being obviously in a pool of bad actors
    - Too many requests
    - Scraping of SCH-UA browser replies (thanks google!!)
    - Scraping of ServiceWorker, new window calls etc

    See https://filipvitas.medium.com/how-to-set-user-agent-header-with-puppeteer-js-and-not-fail-28c7a02165da
    Puppeteer requests https://github.com/dgtlmoon/pyppeteerstealth

    :param headers: request headers dict, user-agent key matched case-insensitively
    :param current_ua: the browser's own reported user-agent string
    :return: the user-agent string to use, or None when neither source provides one
    """
    # A user-agent set in the custom request headers always wins
    ua_in_custom_headers = next((v for k, v in headers.items() if k.lower() == "user-agent"), None)
    if ua_in_custom_headers:
        return ua_in_custom_headers

    # Otherwise clean up the browser's default UA, which advertises headless mode.
    # (Redundant `not ua_in_custom_headers` re-check removed - we already returned above.)
    if current_ua:
        return current_ua.replace('HeadlessChrome', 'Chrome')

    return None
|  | ||||
|  | ||||
class Fetcher():
    """Abstract-ish base class for all content fetchers (requests, playwright, puppeteer, webdriver).

    Subclasses implement run() to populate self.content, self.status_code,
    self.headers and (where possible) self.screenshot / self.xpath_data.
    NOTE: the @abstractmethod decorators below are documentation of the subclass
    contract only — the class does not inherit abc.ABC, so instantiation is not
    actually blocked.
    """
    browser_connection_is_custom = None
    browser_connection_url = None
    browser_steps = None
    browser_steps_screenshot_path = None
    content = None
    error = None
    fetcher_description = "No description"
    headers = {}
    instock_data = None
    instock_data_js = ""
    status_code = None
    webdriver_js_execute_code = None
    xpath_data = None
    xpath_element_js = ""

    # Will be needed in the future by the VisualSelector, always get this where possible.
    screenshot = False
    system_http_proxy = os.getenv('HTTP_PROXY')
    system_https_proxy = os.getenv('HTTPS_PROXY')

    # Time ONTOP of the system defined env minimum time
    render_extract_delay = 0

    def __init__(self):
        """Load the JS snippets used by the VisualSelector scraper and stock detection."""
        from pkg_resources import resource_string
        # The code that scrapes elements and makes a list of elements/size/position to click on in the VisualSelector
        self.xpath_element_js = resource_string(__name__, "res/xpath_element_scraper.js").decode('utf-8')
        self.instock_data_js = resource_string(__name__, "res/stock-not-in-stock.js").decode('utf-8')
        # Use a per-instance dict so fetcher instances never share (and mutate)
        # the class-level `headers` mapping between watches.
        self.headers = {}

    @abstractmethod
    def get_error(self):
        """Return the last recorded error (None when the fetch succeeded)."""
        return self.error

    @abstractmethod
    def run(self,
            url,
            timeout,
            request_headers,
            request_body,
            request_method,
            ignore_status_codes=False,
            current_include_filters=None,
            is_binary=False):
        # Should set self.error, self.status_code and self.content
        pass

    @abstractmethod
    def quit(self):
        """Release any browser/network resources held by the fetcher."""
        return

    @abstractmethod
    def get_last_status_code(self):
        """Return the HTTP status code of the last fetch (None before any fetch)."""
        return self.status_code

    @abstractmethod
    def screenshot_step(self, step_n):
        """Capture a per-step screenshot during browser steps; overridden where supported."""
        return None

    @abstractmethod
    # Return true/false if this checker is ready to run, in the case it needs todo some special config check etc
    def is_ready(self):
        return True

    def get_all_headers(self):
        """
        Get all headers but ensure all keys are lowercase
        :return: dict of header-name (lowercased) -> value
        """
        return {k.lower(): v for k, v in self.headers.items()}

    def browser_steps_get_valid_steps(self):
        """Return an iterator over configured browser steps that actually do something,
        skipping the placeholder 'Choose one' and the implicit 'Goto site' step.
        Returns None when no steps are configured."""
        if self.browser_steps is not None and len(self.browser_steps):
            valid_steps = filter(
                lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
                self.browser_steps)

            return valid_steps

        return None

    def iterate_browser_steps(self):
        """Execute each valid browser step against self.page, saving before/after
        screenshots and HTML for the step editor UI.

        Step selector and optional_value support jinja2 templating (with the
        jinja2_time date extension). Raises BrowserStepsStepException on the
        first playwright Error/TimeoutError so processing stops at that step.
        """
        from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface
        from playwright._impl._errors import TimeoutError, Error
        from jinja2 import Environment
        jinja2_env = Environment(extensions=['jinja2_time.TimeExtension'])

        step_n = 0

        if self.browser_steps is not None and len(self.browser_steps):
            interface = steppable_browser_interface()
            interface.page = self.page
            valid_steps = self.browser_steps_get_valid_steps()

            for step in valid_steps:
                step_n += 1
                logger.debug(f">> Iterating check - browser Step n {step_n} - {step['operation']}...")
                self.screenshot_step("before-" + str(step_n))
                self.save_step_html("before-" + str(step_n))
                try:
                    optional_value = step['optional_value']
                    selector = step['selector']
                    # Support for jinja2 template in step values, with date module added
                    if '{%' in step['optional_value'] or '{{' in step['optional_value']:
                        optional_value = str(jinja2_env.from_string(step['optional_value']).render())
                    if '{%' in step['selector'] or '{{' in step['selector']:
                        selector = str(jinja2_env.from_string(step['selector']).render())

                    getattr(interface, "call_action")(action_name=step['operation'],
                                                      selector=selector,
                                                      optional_value=optional_value)
                    self.screenshot_step(step_n)
                    self.save_step_html(step_n)
                except (Error, TimeoutError) as e:
                    logger.debug(str(e))
                    # Stop processing here
                    raise BrowserStepsStepException(step_n=step_n, original_e=e)

    # It's always good to reset these
    def delete_browser_steps_screenshots(self):
        """Remove stale per-step screenshots from the previous run, if a path is configured."""
        import glob
        if self.browser_steps_screenshot_path is not None:
            dest = os.path.join(self.browser_steps_screenshot_path, 'step_*.jpeg')
            files = glob.glob(dest)
            for f in files:
                if os.path.isfile(f):
                    os.unlink(f)

    def save_step_html(self, param):
        """No-op by default; fetchers that can dump per-step HTML override this."""
        pass
							
								
								
									
										97
									
								
								changedetectionio/content_fetchers/exceptions/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,97 @@ | ||||
| from loguru import logger | ||||
|  | ||||
|  | ||||
class Non200ErrorCodeReceived(Exception):
    """Raised when a fetch completed but the HTTP status was not 200."""

    def __init__(self, status_code, url, screenshot=None, xpath_data=None, page_html=None):
        # Stash everything the UI may need to render the failure details.
        self.url = url
        self.status_code = status_code
        self.screenshot = screenshot
        self.xpath_data = xpath_data

        # Plain-text rendering of the failing page, when HTML was captured.
        if page_html:
            from changedetectionio import html_tools
            self.page_text = html_tools.html_to_text(page_html)
        else:
            self.page_text = None
|  | ||||
|  | ||||
class checksumFromPreviousCheckWasTheSame(Exception):
    """Signals that the fetched content hashed identically to the previous check."""
    pass
|  | ||||
|  | ||||
class JSActionExceptions(Exception):
    """Raised when executing user-supplied JavaScript in the browser fails."""

    def __init__(self, status_code, url, screenshot, message=''):
        # Keep context so the error can be shown with the related screenshot.
        self.message = message
        self.screenshot = screenshot
        self.url = url
        self.status_code = status_code
|  | ||||
class BrowserConnectError(Exception):
    """Raised when the remote browser endpoint could not be reached."""
    msg = ''

    def __init__(self, msg):
        # Log immediately so connection problems show up even if the caller swallows this.
        logger.error(f"Browser connection error {msg}")
        self.msg = msg
|  | ||||
class BrowserFetchTimedOut(Exception):
    """Raised when the whole browser-based fetch exceeded its time budget."""
    msg = ''

    def __init__(self, msg):
        # Log immediately so slow/stuck fetches are visible in the log stream.
        logger.error(f"Browser processing took too long - {msg}")
        self.msg = msg
|  | ||||
class BrowserStepsStepException(Exception):
    """Wraps the playwright error raised while running a specific browser step."""

    def __init__(self, step_n, original_e):
        # Keep both the step index and the underlying exception for the UI/debugging.
        self.original_e = original_e
        self.step_n = step_n
        logger.debug(f"Browser Steps exception at step {self.step_n} {str(original_e)}")
|  | ||||
|  | ||||
| # @todo - make base Exception class that announces via logger() | ||||
class PageUnloadable(Exception):
    """Raised when the target page could not be loaded at all."""

    def __init__(self, status_code=None, url='', message='', screenshot=False):
        # Keep details so other parts of the app can report the failure.
        self.message = message
        self.screenshot = screenshot
        self.url = url
        self.status_code = status_code
|  | ||||
class BrowserStepsInUnsupportedFetcher(Exception):
    """Raised when browser steps are configured but the chosen fetcher cannot run them."""

    def __init__(self, url):
        self.url = url
|  | ||||
class EmptyReply(Exception):
    """Raised when the fetch returned no response object or no content at all."""

    def __init__(self, status_code, url, screenshot=None):
        # Keep details so other parts of the app can report the failure.
        self.screenshot = screenshot
        self.url = url
        self.status_code = status_code
|  | ||||
|  | ||||
class ScreenshotUnavailable(Exception):
    """Raised when a screenshot could not be captured (e.g. the page was too large)."""

    def __init__(self, status_code, url, page_html=None):
        # Set this so we can use it in other parts of the app
        self.status_code = status_code
        self.url = url
        # Always define page_text so readers never hit AttributeError when no HTML was captured
        self.page_text = None
        if page_html:
            # Fixed import path: `html_tools` is not a top-level module; use the same
            # package-relative import as Non200ErrorCodeReceived above.
            from changedetectionio import html_tools
            self.page_text = html_tools.html_to_text(page_html)
|  | ||||
|  | ||||
class ReplyWithContentButNoText(Exception):
    """Raised when a page was fetched but contained no extractable text (after filters)."""

    def __init__(self, status_code, url, screenshot=None, has_filters=False, html_content=''):
        # Keep details so other parts of the app can report the failure.
        self.html_content = html_content
        self.has_filters = has_filters
        self.screenshot = screenshot
        self.url = url
        self.status_code = status_code
							
								
								
									
										208
									
								
								changedetectionio/content_fetchers/playwright.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,208 @@ | ||||
| import json | ||||
| import os | ||||
| from urllib.parse import urlparse | ||||
|  | ||||
| from loguru import logger | ||||
|  | ||||
| from changedetectionio.content_fetchers.base import Fetcher, manage_user_agent | ||||
| from changedetectionio.content_fetchers.exceptions import PageUnloadable, Non200ErrorCodeReceived, EmptyReply, ScreenshotUnavailable | ||||
|  | ||||
class fetcher(Fetcher):
    """Playwright (sync API) fetcher that drives a remote Chromium/Firefox/WebKit over CDP.

    Connects to the browser given by PLAYWRIGHT_DRIVER_URL (or a per-watch
    custom connection URL), runs optional custom JS and browser steps, and
    captures content, headers, xpath/visual-selector data and a screenshot.
    """
    fetcher_description = "Playwright {}/Javascript".format(
        os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').capitalize()
    )
    if os.getenv("PLAYWRIGHT_DRIVER_URL"):
        fetcher_description += " via '{}'".format(os.getenv("PLAYWRIGHT_DRIVER_URL"))

    browser_type = ''
    command_executor = ''

    # Configs for Proxy setup
    # In the ENV vars, is prefixed with "playwright_proxy_", so it is for example "playwright_proxy_server"
    playwright_proxy_settings_mappings = ['bypass', 'server', 'username', 'password']

    proxy = None

    def __init__(self, proxy_override=None, custom_browser_connection_url=None):
        """Resolve browser connection URL and proxy settings (env vars, per-watch override)."""
        super().__init__()

        self.browser_type = os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').strip('"')

        if custom_browser_connection_url:
            self.browser_connection_is_custom = True
            self.browser_connection_url = custom_browser_connection_url
        else:
            # Fallback to fetching from system
            # .strip('"') is going to save someone a lot of time when they accidently wrap the env value
            self.browser_connection_url = os.getenv("PLAYWRIGHT_DRIVER_URL", 'ws://playwright-chrome:3000').strip('"')

        # If any proxy settings are enabled, then we should setup the proxy object
        proxy_args = {}
        for k in self.playwright_proxy_settings_mappings:
            v = os.getenv('playwright_proxy_' + k, False)
            if v:
                proxy_args[k] = v.strip('"')

        if proxy_args:
            self.proxy = proxy_args

        # allow per-watch proxy selection override
        if proxy_override:
            self.proxy = {'server': proxy_override}

        if self.proxy:
            # Playwright needs separate username and password values
            parsed = urlparse(self.proxy.get('server'))
            if parsed.username:
                self.proxy['username'] = parsed.username
                self.proxy['password'] = parsed.password

    def screenshot_step(self, step_n=''):
        """Capture a full-page JPEG for browser step `step_n` and save it when a path is configured."""
        screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=int(os.getenv("SCREENSHOT_QUALITY", 72)))

        if self.browser_steps_screenshot_path is not None:
            destination = os.path.join(self.browser_steps_screenshot_path, 'step_{}.jpeg'.format(step_n))
            logger.debug(f"Saving step screenshot to {destination}")
            with open(destination, 'wb') as f:
                f.write(screenshot)

    def save_step_html(self, step_n):
        """Dump the current page HTML for browser step `step_n` into the steps screenshot path."""
        content = self.page.content()
        destination = os.path.join(self.browser_steps_screenshot_path, 'step_{}.html'.format(step_n))
        logger.debug(f"Saving step HTML to {destination}")
        with open(destination, 'w') as f:
            f.write(content)

    def run(self,
            url,
            timeout,
            request_headers,
            request_body,
            request_method,
            ignore_status_codes=False,
            current_include_filters=None,
            is_binary=False):
        """Fetch `url` with Playwright; populates self.content / status_code / headers /
        screenshot / xpath_data / instock_data.

        Raises EmptyReply, Non200ErrorCodeReceived, PageUnloadable or
        ScreenshotUnavailable on failure.
        """
        from playwright.sync_api import sync_playwright
        import playwright._impl._errors
        from changedetectionio.content_fetchers import visualselector_xpath_selectors
        self.delete_browser_steps_screenshots()
        response = None

        with sync_playwright() as p:
            browser_type = getattr(p, self.browser_type)

            # Seemed to cause a connection Exception even tho I can see it connect
            # self.browser = browser_type.connect(self.command_executor, timeout=timeout*1000)
            # 60,000 connection timeout only
            browser = browser_type.connect_over_cdp(self.browser_connection_url, timeout=60000)

            # SOCKS5 with authentication is not supported (yet)
            # https://github.com/microsoft/playwright/issues/10567

            # Set user agent to prevent Cloudflare from blocking the browser
            # Use the default one configured in the App.py model that's passed from fetch_site_status.py
            context = browser.new_context(
                accept_downloads=False,  # Should never be needed
                bypass_csp=True,  # This is needed to enable JavaScript execution on GitHub and others
                extra_http_headers=request_headers,
                ignore_https_errors=True,
                proxy=self.proxy,
                service_workers=os.getenv('PLAYWRIGHT_SERVICE_WORKERS', 'allow'), # Should be `allow` or `block` - sites like YouTube can transmit large amounts of data via Service Workers
                user_agent=manage_user_agent(headers=request_headers),
            )

            self.page = context.new_page()

            # Listen for all console events and handle errors
            self.page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))

            # Re-use as much code from browser steps as possible so its the same
            from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface
            browsersteps_interface = steppable_browser_interface()
            browsersteps_interface.page = self.page

            response = browsersteps_interface.action_goto_url(value=url)

            # Check for None BEFORE touching the response object; previously the
            # headers were read first, which would raise AttributeError and make
            # this EmptyReply branch unreachable.
            if response is None:
                context.close()
                browser.close()
                logger.debug("Content Fetcher > Response object was none")
                raise EmptyReply(url=url, status_code=None)

            self.headers = response.all_headers()

            try:
                if self.webdriver_js_execute_code is not None and len(self.webdriver_js_execute_code):
                    browsersteps_interface.action_execute_js(value=self.webdriver_js_execute_code, selector=None)
            except playwright._impl._errors.TimeoutError as e:
                # NOTE(review): the browser is closed here but execution continues below,
                # so subsequent page calls will fail — confirm whether the timeout
                # should instead be treated as fatal or the close skipped.
                context.close()
                browser.close()
                # This can be ok, we will try to grab what we could retrieve
                pass
            except Exception as e:
                logger.debug(f"Content Fetcher > Other exception when executing custom JS code {str(e)}")
                context.close()
                browser.close()
                raise PageUnloadable(url=url, status_code=None, message=str(e))

            extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
            self.page.wait_for_timeout(extra_wait * 1000)

            try:
                self.status_code = response.status
            except Exception as e:
                # https://github.com/dgtlmoon/changedetection.io/discussions/2122#discussioncomment-8241962
                logger.critical(f"Response from the browser/Playwright did not have a status_code! Response follows.")
                logger.critical(response)
                context.close()
                browser.close()
                raise PageUnloadable(url=url, status_code=None, message=str(e))

            if self.status_code != 200 and not ignore_status_codes:
                screenshot = self.page.screenshot(type='jpeg', full_page=True,
                                                  quality=int(os.getenv("SCREENSHOT_QUALITY", 72)))
                # Close the browser before raising so this path doesn't leak the context
                context.close()
                browser.close()
                raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, screenshot=screenshot)

            if len(self.page.content().strip()) == 0:
                context.close()
                browser.close()
                logger.debug("Content Fetcher > Content was empty")
                raise EmptyReply(url=url, status_code=response.status)

            # Run Browser Steps here
            if self.browser_steps_get_valid_steps():
                self.iterate_browser_steps()

            self.page.wait_for_timeout(extra_wait * 1000)

            # So we can find an element on the page where its selector was entered manually (maybe not xPath etc)
            if current_include_filters is not None:
                self.page.evaluate("var include_filters={}".format(json.dumps(current_include_filters)))
            else:
                self.page.evaluate("var include_filters=''")

            self.xpath_data = self.page.evaluate(
                "async () => {" + self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) + "}")
            self.instock_data = self.page.evaluate("async () => {" + self.instock_data_js + "}")

            self.content = self.page.content()
            # Bug 3 in Playwright screenshot handling
            # Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it
            # JPEG is better here because the screenshots can be very very large

            # Screenshots also travel via the ws:// (websocket) meaning that the binary data is base64 encoded
            # which will significantly increase the IO size between the server and client, it's recommended to use the lowest
            # acceptable screenshot quality here
            try:
                # The actual screenshot - this always base64 and needs decoding! horrible! huge CPU usage
                self.screenshot = self.page.screenshot(type='jpeg',
                                                       full_page=True,
                                                       quality=int(os.getenv("SCREENSHOT_QUALITY", 72)),
                                                       )
            except Exception as e:
                # It's likely the screenshot was too long/big and something crashed
                raise ScreenshotUnavailable(url=url, status_code=self.status_code)
            finally:
                context.close()
                browser.close()
							
								
								
									
										247
									
								
								changedetectionio/content_fetchers/puppeteer.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,247 @@ | ||||
| import asyncio | ||||
| import json | ||||
| import os | ||||
| import websockets.exceptions | ||||
| from urllib.parse import urlparse | ||||
|  | ||||
| from loguru import logger | ||||
|  | ||||
| from changedetectionio.content_fetchers.base import Fetcher, manage_user_agent | ||||
| from changedetectionio.content_fetchers.exceptions import PageUnloadable, Non200ErrorCodeReceived, EmptyReply, BrowserFetchTimedOut, BrowserConnectError | ||||
|  | ||||
|  | ||||
| class fetcher(Fetcher): | ||||
|     fetcher_description = "Puppeteer/direct {}/Javascript".format( | ||||
|         os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').capitalize() | ||||
|     ) | ||||
|     if os.getenv("PLAYWRIGHT_DRIVER_URL"): | ||||
|         fetcher_description += " via '{}'".format(os.getenv("PLAYWRIGHT_DRIVER_URL")) | ||||
|  | ||||
|     browser_type = '' | ||||
|     command_executor = '' | ||||
|  | ||||
|     proxy = None | ||||
|  | ||||
|     def __init__(self, proxy_override=None, custom_browser_connection_url=None): | ||||
|         super().__init__() | ||||
|  | ||||
|         if custom_browser_connection_url: | ||||
|             self.browser_connection_is_custom = True | ||||
|             self.browser_connection_url = custom_browser_connection_url | ||||
|         else: | ||||
|             # Fallback to fetching from system | ||||
|             # .strip('"') is going to save someone a lot of time when they accidently wrap the env value | ||||
|             self.browser_connection_url = os.getenv("PLAYWRIGHT_DRIVER_URL", 'ws://playwright-chrome:3000').strip('"') | ||||
|  | ||||
|         # allow per-watch proxy selection override | ||||
|         # @todo check global too? | ||||
|         if proxy_override: | ||||
|             # Playwright needs separate username and password values | ||||
|             parsed = urlparse(proxy_override) | ||||
|             if parsed: | ||||
|                 self.proxy = {'username': parsed.username, 'password': parsed.password} | ||||
|                 # Add the proxy server chrome start option, the username and password never gets added here | ||||
|                 # (It always goes in via await self.page.authenticate(self.proxy)) | ||||
|  | ||||
|                 # @todo filter some injection attack? | ||||
|                 # check scheme when no scheme | ||||
|                 proxy_url = parsed.scheme + "://" if parsed.scheme else 'http://' | ||||
|                 r = "?" if not '?' in self.browser_connection_url else '&' | ||||
|                 port = ":"+str(parsed.port) if parsed.port else '' | ||||
|                 q = "?"+parsed.query if parsed.query else '' | ||||
|                 proxy_url += f"{parsed.hostname}{port}{parsed.path}{q}" | ||||
|                 self.browser_connection_url += f"{r}--proxy-server={proxy_url}" | ||||
|  | ||||
|     # def screenshot_step(self, step_n=''): | ||||
|     #     screenshot = self.page.screenshot(type='jpeg', full_page=True, quality=85) | ||||
|     # | ||||
|     #     if self.browser_steps_screenshot_path is not None: | ||||
|     #         destination = os.path.join(self.browser_steps_screenshot_path, 'step_{}.jpeg'.format(step_n)) | ||||
|     #         logger.debug(f"Saving step screenshot to {destination}") | ||||
|     #         with open(destination, 'wb') as f: | ||||
|     #             f.write(screenshot) | ||||
|     # | ||||
|     # def save_step_html(self, step_n): | ||||
|     #     content = self.page.content() | ||||
|     #     destination = os.path.join(self.browser_steps_screenshot_path, 'step_{}.html'.format(step_n)) | ||||
|     #     logger.debug(f"Saving step HTML to {destination}") | ||||
|     #     with open(destination, 'w') as f: | ||||
|     #         f.write(content) | ||||
|  | ||||
|     async def fetch_page(self, | ||||
|                          url, | ||||
|                          timeout, | ||||
|                          request_headers, | ||||
|                          request_body, | ||||
|                          request_method, | ||||
|                          ignore_status_codes, | ||||
|                          current_include_filters, | ||||
|                          is_binary | ||||
|                          ): | ||||
|  | ||||
|         from changedetectionio.content_fetchers import visualselector_xpath_selectors | ||||
|         self.delete_browser_steps_screenshots() | ||||
|         extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay | ||||
|  | ||||
|         from pyppeteer import Pyppeteer | ||||
|         pyppeteer_instance = Pyppeteer() | ||||
|  | ||||
|         # Connect directly using the specified browser_ws_endpoint | ||||
|         # @todo timeout | ||||
|         try: | ||||
|             browser = await pyppeteer_instance.connect(browserWSEndpoint=self.browser_connection_url, | ||||
|                                                        ignoreHTTPSErrors=True | ||||
|                                                        ) | ||||
|         except websockets.exceptions.InvalidStatusCode as e: | ||||
|             raise BrowserConnectError(msg=f"Error while trying to connect the browser, Code {e.status_code} (check your access)") | ||||
|         except websockets.exceptions.InvalidURI: | ||||
|             raise BrowserConnectError(msg=f"Error connecting to the browser, check your browser connection address (should be ws:// or wss://") | ||||
|         except Exception as e: | ||||
|             raise BrowserConnectError(msg=f"Error connecting to the browser {str(e)}") | ||||
|         else: | ||||
|             self.page = await browser.newPage() | ||||
|  | ||||
|         await self.page.setUserAgent(manage_user_agent(headers=request_headers, current_ua=await self.page.evaluate('navigator.userAgent'))) | ||||
|  | ||||
|         await self.page.setBypassCSP(True) | ||||
|         if request_headers: | ||||
|             await self.page.setExtraHTTPHeaders(request_headers) | ||||
|  | ||||
|         # SOCKS5 with authentication is not supported (yet) | ||||
|         # https://github.com/microsoft/playwright/issues/10567 | ||||
|         self.page.setDefaultNavigationTimeout(0) | ||||
|         await self.page.setCacheEnabled(True) | ||||
|         if self.proxy and self.proxy.get('username'): | ||||
|             # Setting Proxy-Authentication header is deprecated, and doing so can trigger header change errors from Puppeteer | ||||
|             # https://github.com/puppeteer/puppeteer/issues/676 ? | ||||
|             # https://help.brightdata.com/hc/en-us/articles/12632549957649-Proxy-Manager-How-to-Guides#h_01HAKWR4Q0AFS8RZTNYWRDFJC2 | ||||
|             # https://cri.dev/posts/2020-03-30-How-to-solve-Puppeteer-Chrome-Error-ERR_INVALID_ARGUMENT/ | ||||
|             await self.page.authenticate(self.proxy) | ||||
|  | ||||
|         # Re-use as much code from browser steps as possible so its the same | ||||
|         # from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface | ||||
|  | ||||
|         # not yet used here, we fallback to playwright when browsersteps is required | ||||
|         #            browsersteps_interface = steppable_browser_interface() | ||||
|         #            browsersteps_interface.page = self.page | ||||
|  | ||||
|         response = await self.page.goto(url, waitUntil="load") | ||||
|  | ||||
|  | ||||
|         if response is None: | ||||
|             await self.page.close() | ||||
|             await browser.close() | ||||
|             logger.warning("Content Fetcher > Response object was none") | ||||
|             raise EmptyReply(url=url, status_code=None) | ||||
|  | ||||
|         self.headers = response.headers | ||||
|  | ||||
|         try: | ||||
|             if self.webdriver_js_execute_code is not None and len(self.webdriver_js_execute_code): | ||||
|                 await self.page.evaluate(self.webdriver_js_execute_code) | ||||
|         except Exception as e: | ||||
|             logger.warning("Got exception when running evaluate on custom JS code") | ||||
|             logger.error(str(e)) | ||||
|             await self.page.close() | ||||
|             await browser.close() | ||||
|             # This can be ok, we will try to grab what we could retrieve | ||||
|             raise PageUnloadable(url=url, status_code=None, message=str(e)) | ||||
|  | ||||
|         try: | ||||
|             self.status_code = response.status | ||||
|         except Exception as e: | ||||
|             # https://github.com/dgtlmoon/changedetection.io/discussions/2122#discussioncomment-8241962 | ||||
|             logger.critical(f"Response from the browser/Playwright did not have a status_code! Response follows.") | ||||
|             logger.critical(response) | ||||
|             await self.page.close() | ||||
|             await browser.close() | ||||
|             raise PageUnloadable(url=url, status_code=None, message=str(e)) | ||||
|  | ||||
|         if self.status_code != 200 and not ignore_status_codes: | ||||
|             screenshot = await self.page.screenshot(type_='jpeg', | ||||
|                                                     fullPage=True, | ||||
|                                                     quality=int(os.getenv("SCREENSHOT_QUALITY", 72))) | ||||
|  | ||||
|             raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, screenshot=screenshot) | ||||
|         content = await self.page.content | ||||
|         if len(content.strip()) == 0: | ||||
|             await self.page.close() | ||||
|             await browser.close() | ||||
|             logger.error("Content Fetcher > Content was empty") | ||||
|             raise EmptyReply(url=url, status_code=response.status) | ||||
|  | ||||
|         # Run Browser Steps here | ||||
|         # @todo not yet supported, we switch to playwright in this case | ||||
|         #            if self.browser_steps_get_valid_steps(): | ||||
|         #                self.iterate_browser_steps() | ||||
|  | ||||
|         await asyncio.sleep(1 + extra_wait) | ||||
|  | ||||
|         # So we can find an element on the page where its selector was entered manually (maybe not xPath etc) | ||||
|         # Setup the xPath/VisualSelector scraper | ||||
|         if current_include_filters is not None: | ||||
|             js = json.dumps(current_include_filters) | ||||
|             await self.page.evaluate(f"var include_filters={js}") | ||||
|         else: | ||||
|             await self.page.evaluate(f"var include_filters=''") | ||||
|  | ||||
|         self.xpath_data = await self.page.evaluate( | ||||
|             "async () => {" + self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) + "}") | ||||
|         self.instock_data = await self.page.evaluate("async () => {" + self.instock_data_js + "}") | ||||
|  | ||||
|         self.content = await self.page.content | ||||
|         # Bug 3 in Playwright screenshot handling | ||||
|         # Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it | ||||
|         # JPEG is better here because the screenshots can be very very large | ||||
|  | ||||
|         # Screenshots also travel via the ws:// (websocket) meaning that the binary data is base64 encoded | ||||
|         # which will significantly increase the IO size between the server and client, it's recommended to use the lowest | ||||
|         # acceptable screenshot quality here | ||||
|         try: | ||||
|             self.screenshot = await self.page.screenshot(type_='jpeg', | ||||
|                                                          fullPage=True, | ||||
|                                                          quality=int(os.getenv("SCREENSHOT_QUALITY", 72))) | ||||
|         except Exception as e: | ||||
|             logger.error("Error fetching screenshot") | ||||
|             # // May fail on very large pages with 'WARNING: tile memory limits exceeded, some content may not draw' | ||||
|             # // @ todo after text extract, we can place some overlay text with red background to say 'croppped' | ||||
|             logger.error('ERROR: content-fetcher page was maybe too large for a screenshot, reverting to viewport only screenshot') | ||||
|             try: | ||||
|                 self.screenshot = await self.page.screenshot(type_='jpeg', | ||||
|                                                              fullPage=False, | ||||
|                                                              quality=int(os.getenv("SCREENSHOT_QUALITY", 72))) | ||||
|             except Exception as e: | ||||
|                 logger.error('ERROR: Failed to get viewport-only reduced screenshot :(') | ||||
|                 pass | ||||
|         finally: | ||||
|             # It's good to log here in the case that the browser crashes on shutting down but we still get the data we need | ||||
|             logger.success(f"Fetching '{url}' complete, closing page") | ||||
|             await self.page.close() | ||||
|             logger.success(f"Fetching '{url}' complete, closing browser") | ||||
|             await browser.close() | ||||
|         logger.success(f"Fetching '{url}' complete, exiting puppeteer fetch.") | ||||
|  | ||||
    async def main(self, **kwargs):
        """Async entry point used by run(); forwards all keyword arguments to fetch_page()."""
        await self.fetch_page(**kwargs)
|  | ||||
|     def run(self, url, timeout, request_headers, request_body, request_method, ignore_status_codes=False, | ||||
|             current_include_filters=None, is_binary=False): | ||||
|  | ||||
|         #@todo make update_worker async which could run any of these content_fetchers within memory and time constraints | ||||
|         max_time = os.getenv('PUPPETEER_MAX_PROCESSING_TIMEOUT_SECONDS', 180) | ||||
|  | ||||
|         # This will work in 3.10 but not >= 3.11 because 3.11 wants tasks only | ||||
|         try: | ||||
|             asyncio.run(asyncio.wait_for(self.main( | ||||
|                 url=url, | ||||
|                 timeout=timeout, | ||||
|                 request_headers=request_headers, | ||||
|                 request_body=request_body, | ||||
|                 request_method=request_method, | ||||
|                 ignore_status_codes=ignore_status_codes, | ||||
|                 current_include_filters=current_include_filters, | ||||
|                 is_binary=is_binary | ||||
|             ), timeout=max_time)) | ||||
|         except asyncio.TimeoutError: | ||||
|             raise(BrowserFetchTimedOut(msg=f"Browser connected but was unable to process the page in {max_time} seconds.")) | ||||
|  | ||||
							
								
								
									
										91
									
								
								changedetectionio/content_fetchers/requests.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,91 @@ | ||||
| import hashlib | ||||
| import os | ||||
|  | ||||
| import chardet | ||||
| import requests | ||||
|  | ||||
| from changedetectionio.content_fetchers.exceptions import BrowserStepsInUnsupportedFetcher, EmptyReply, Non200ErrorCodeReceived | ||||
| from changedetectionio.content_fetchers.base import Fetcher | ||||
|  | ||||
|  | ||||
| # "html_requests" is listed as the default fetcher in store.py! | ||||
class fetcher(Fetcher):
    """Plain HTTP(S) fetcher built on `requests` - fast, but executes no JavaScript."""

    fetcher_description = "Basic fast Plaintext/HTTP Client"

    def __init__(self, proxy_override=None, custom_browser_connection_url=None):
        super().__init__()
        # Per-watch proxy URL; when unset, run() falls back to the system-wide proxies.
        self.proxy_override = proxy_override
        # browser_connection_url is none because its always 'launched locally'

    def run(self,
            url,
            timeout,
            request_headers,
            request_body,
            request_method,
            ignore_status_codes=False,
            current_include_filters=None,
            is_binary=False):
        """Fetch `url` and populate self.content / self.raw_content / self.headers / self.status_code.

        Raises:
            BrowserStepsInUnsupportedFetcher: browser steps require a JS-capable fetcher.
            EmptyReply: the server returned a zero-length body.
            Non200ErrorCodeReceived: non-200 status and ignore_status_codes is False.
        """

        if self.browser_steps_get_valid_steps():
            raise BrowserStepsInUnsupportedFetcher(url=url)

        # Make requests use a more modern looking user-agent, unless the caller already
        # supplied one (header names are case-insensitive, hence the lowercased copy).
        if not {k.lower(): v for k, v in request_headers.items()}.get('user-agent', None):
            request_headers['User-Agent'] = os.getenv("DEFAULT_SETTINGS_HEADERS_USERAGENT",
                                                      'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36')

        proxies = {}

        # Allows override the proxy on a per-request basis

        # https://requests.readthedocs.io/en/latest/user/advanced/#socks
        # Should also work with `socks5://user:pass@host:port` type syntax.

        if self.proxy_override:
            proxies = {'http': self.proxy_override, 'https': self.proxy_override, 'ftp': self.proxy_override}
        else:
            if self.system_http_proxy:
                proxies['http'] = self.system_http_proxy
            if self.system_https_proxy:
                proxies['https'] = self.system_https_proxy

        # NOTE: verify=False deliberately disables TLS certificate verification so that
        # self-signed / misconfigured sites can still be watched - this is a conscious trade-off.
        r = requests.request(method=request_method,
                             data=request_body,
                             url=url,
                             headers=request_headers,
                             timeout=timeout,
                             proxies=proxies,
                             verify=False)

        # If the response did not tell us what encoding format to expect, Then use chardet to override what `requests` thinks.
        # For example - some sites don't tell us it's utf-8, but return utf-8 content
        # This seems to not occur when using webdriver/selenium, it seems to detect the text encoding more reliably.
        # https://github.com/psf/requests/issues/1604 good info about requests encoding detection
        if not is_binary:
            # Don't run this for PDF (and requests identified as binary) takes a _long_ time
            content_type = r.headers.get('content-type')
            if not content_type or 'charset=' not in content_type:
                encoding = chardet.detect(r.content)['encoding']
                if encoding:
                    r.encoding = encoding

        self.headers = r.headers

        # Empty bytes object (or None) means the server sent us nothing usable.
        if not r.content:
            raise EmptyReply(url=url, status_code=r.status_code)

        # @todo test this
        # @todo maybe you really want to test zero-byte return pages?
        if r.status_code != 200 and not ignore_status_codes:
            # maybe check with content works?
            raise Non200ErrorCodeReceived(url=url, status_code=r.status_code, page_html=r.text)

        self.status_code = r.status_code
        if is_binary:
            # Binary files just return their checksum until we add something smarter
            self.content = hashlib.md5(r.content).hexdigest()
        else:
            self.content = r.text

        self.raw_content = r.content
| @@ -146,7 +146,7 @@ module.exports = async ({page, context}) => { | ||||
|     var xpath_data; | ||||
|     var instock_data; | ||||
|     try { | ||||
|         // Not sure the best way here, in the future this should be a new package added to npm then run in browserless
 | ||||
|         // Not sure the best way here, in the future this should be a new package added to npm then run in evaluatedCode
 | ||||
|         // (Once the old playwright is removed)
 | ||||
|         xpath_data = await page.evaluate((include_filters) => {%xpath_scrape_code%}, include_filters); | ||||
|         instock_data = await page.evaluate(() => {%instock_scrape_code%}); | ||||
							
								
								
									
										192
									
								
								changedetectionio/content_fetchers/res/stock-not-in-stock.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,192 @@ | ||||
| // Restock Detector | ||||
| // (c) Leigh Morresi dgtlmoon@gmail.com | ||||
| // | ||||
| // Assumes the product is in stock to begin with, unless the following appears above the fold ; | ||||
| // - outOfStockTexts appears above the fold (out of stock) | ||||
| // - negateOutOfStockRegex (really is in stock) | ||||
|  | ||||
| function isItemInStock() { | ||||
|     // @todo Pass these in so the same list can be used in non-JS fetchers | ||||
|     const outOfStockTexts = [ | ||||
|         ' أخبرني عندما يتوفر', | ||||
|         '0 in stock', | ||||
|         'actuellement indisponible', | ||||
|         'agotado', | ||||
|         'article épuisé', | ||||
|         'artikel zurzeit vergriffen', | ||||
|         'as soon as stock is available', | ||||
|         'ausverkauft', // sold out | ||||
|         'available for back order', | ||||
|         'awaiting stock', | ||||
|         'back in stock soon', | ||||
|         'back-order or out of stock', | ||||
|         'backordered', | ||||
|         'benachrichtigt mich', // notify me | ||||
|         'brak na stanie', | ||||
|         'brak w magazynie', | ||||
|         'coming soon', | ||||
|         'currently have any tickets for this', | ||||
|         'currently unavailable', | ||||
|         'dieser artikel ist bald wieder verfügbar', | ||||
|         'dostępne wkrótce', | ||||
|         'en rupture de stock', | ||||
|         'ist derzeit nicht auf lager', | ||||
|         'item is no longer available', | ||||
|         'let me know when it\'s available', | ||||
|         'message if back in stock', | ||||
|         'nachricht bei', | ||||
|         'nicht auf lager', | ||||
|         'nicht lieferbar', | ||||
|         'nicht zur verfügung', | ||||
|         'niet beschikbaar', | ||||
|         'niet leverbaar', | ||||
|         'niet op voorraad', | ||||
|         'no disponible temporalmente', | ||||
|         'no longer in stock', | ||||
|         'no tickets available', | ||||
|         'not available', | ||||
|         'not currently available', | ||||
|         'not in stock', | ||||
|         'notify me when available', | ||||
|         'notify when available', | ||||
|         'não estamos a aceitar encomendas', | ||||
|         'out of stock', | ||||
|         'out-of-stock', | ||||
|         'prodotto esaurito', | ||||
|         'produkt niedostępny', | ||||
|         'sold out', | ||||
|         'sold-out', | ||||
|         'temporarily out of stock', | ||||
|         'temporarily unavailable', | ||||
|         'there were no search results for', | ||||
|         'this item is currently unavailable', | ||||
|         'tickets unavailable', | ||||
|         'tijdelijk uitverkocht', | ||||
|         'unavailable tickets', | ||||
|         'vorbestellung ist bald möglich', | ||||
|         'we couldn\'t find any products that match', | ||||
|         'we do not currently have an estimate of when this product will be back in stock.', | ||||
|         'we don\'t know when or if this item will be back in stock.', | ||||
|         'we were not able to find a match', | ||||
|         'zur zeit nicht an lager', | ||||
|         '品切れ', | ||||
|         '已售', | ||||
|         '已售完', | ||||
|         '품절' | ||||
|     ]; | ||||
|  | ||||
|  | ||||
|     const vh = Math.max(document.documentElement.clientHeight || 0, window.innerHeight || 0); | ||||
|  | ||||
|     function getElementBaseText(element) { | ||||
|         // .textContent can include text from children which may give the wrong results | ||||
|         // scan only immediate TEXT_NODEs, which will be a child of the element | ||||
|         var text = ""; | ||||
|         for (var i = 0; i < element.childNodes.length; ++i) | ||||
|             if (element.childNodes[i].nodeType === Node.TEXT_NODE) | ||||
|                 text += element.childNodes[i].textContent; | ||||
|         return text.toLowerCase().trim(); | ||||
|     } | ||||
|  | ||||
|     const negateOutOfStockRegex = new RegExp('^([0-9] in stock|add to cart|in stock)', 'ig'); | ||||
|  | ||||
|     // The out-of-stock or in-stock-text is generally always above-the-fold | ||||
|     // and often below-the-fold is a list of related products that may or may not contain trigger text | ||||
|     // so it's good to filter to just the 'above the fold' elements | ||||
|     // and it should be atleast 100px from the top to ignore items in the toolbar, sometimes menu items like "Coming soon" exist | ||||
|  | ||||
|  | ||||
| // @todo - if it's SVG or IMG, go into image diff mode | ||||
| // %ELEMENTS% replaced at injection time because different interfaces use it with different settings | ||||
|  | ||||
|     console.log("Scanning %ELEMENTS%"); | ||||
|  | ||||
|     function collectVisibleElements(parent, visibleElements) { | ||||
|         if (!parent) return; // Base case: if parent is null or undefined, return | ||||
|  | ||||
|         // Add the parent itself to the visible elements array if it's of the specified types | ||||
|         visibleElements.push(parent); | ||||
|  | ||||
|         // Iterate over the parent's children | ||||
|         const children = parent.children; | ||||
|         for (let i = 0; i < children.length; i++) { | ||||
|             const child = children[i]; | ||||
|             if ( | ||||
|                 child.nodeType === Node.ELEMENT_NODE && | ||||
|                 window.getComputedStyle(child).display !== 'none' && | ||||
|                 window.getComputedStyle(child).visibility !== 'hidden' && | ||||
|                 child.offsetWidth >= 0 && | ||||
|                 child.offsetHeight >= 0 && | ||||
|                 window.getComputedStyle(child).contentVisibility !== 'hidden' | ||||
|             ) { | ||||
|                 // If the child is an element and is visible, recursively collect visible elements | ||||
|                 collectVisibleElements(child, visibleElements); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     const elementsToScan = []; | ||||
|     collectVisibleElements(document.body, elementsToScan); | ||||
|  | ||||
|     var elementText = ""; | ||||
|  | ||||
|     // REGEXS THAT REALLY MEAN IT'S IN STOCK | ||||
|     for (let i = elementsToScan.length - 1; i >= 0; i--) { | ||||
|         const element = elementsToScan[i]; | ||||
|  | ||||
|         // outside the 'fold' or some weird text in the heading area | ||||
|         // .getBoundingClientRect() was causing a crash in chrome 119, can only be run on contentVisibility != hidden | ||||
|         if (element.getBoundingClientRect().top + window.scrollY >= vh || element.getBoundingClientRect().top + window.scrollY <= 100) { | ||||
|             continue | ||||
|         } | ||||
|  | ||||
|         elementText = ""; | ||||
|         if (element.tagName.toLowerCase() === "input") { | ||||
|             elementText = element.value.toLowerCase().trim(); | ||||
|         } else { | ||||
|             elementText = getElementBaseText(element); | ||||
|         } | ||||
|  | ||||
|         if (elementText.length) { | ||||
|             // try which ones could mean its in stock | ||||
|             if (negateOutOfStockRegex.test(elementText) && !elementText.includes('(0 products)')) { | ||||
|                 console.log(`Negating/overriding 'Out of Stock' back to "Possibly in stock" found "${elementText}"`) | ||||
|                 return 'Possibly in stock'; | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // OTHER STUFF THAT COULD BE THAT IT'S OUT OF STOCK | ||||
|     for (let i = elementsToScan.length - 1; i >= 0; i--) { | ||||
|         const element = elementsToScan[i]; | ||||
|         // outside the 'fold' or some weird text in the heading area | ||||
|         // .getBoundingClientRect() was causing a crash in chrome 119, can only be run on contentVisibility != hidden | ||||
|         if (element.getBoundingClientRect().top + window.scrollY >= vh + 150 || element.getBoundingClientRect().top + window.scrollY <= 100) { | ||||
|             continue | ||||
|         } | ||||
|         elementText = ""; | ||||
|         if (element.tagName.toLowerCase() === "input") { | ||||
|             elementText = element.value.toLowerCase().trim(); | ||||
|         } else { | ||||
|             elementText = getElementBaseText(element); | ||||
|         } | ||||
|  | ||||
|         if (elementText.length) { | ||||
|             // and these mean its out of stock | ||||
|             for (const outOfStockText of outOfStockTexts) { | ||||
|                 if (elementText.includes(outOfStockText)) { | ||||
|                     console.log(`Selected 'Out of Stock' - found text "${outOfStockText}" - "${elementText}"`) | ||||
|                     return outOfStockText; // item is out of stock | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     console.log(`Returning 'Possibly in stock' - cant' find any useful matching text`) | ||||
|     return 'Possibly in stock'; // possibly in stock, cant decide otherwise. | ||||
| } | ||||
|  | ||||
| // returns the element text that makes it think it's out of stock | ||||
| return isItemInStock().trim() | ||||
|  | ||||
|  | ||||
| @@ -16,24 +16,23 @@ try { | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| // Include the getXpath script directly, easier than fetching
 | ||||
| function getxpath(e) { | ||||
|         var n = e; | ||||
|         if (n && n.id) return '//*[@id="' + n.id + '"]'; | ||||
|         for (var o = []; n && Node.ELEMENT_NODE === n.nodeType;) { | ||||
|             for (var i = 0, r = !1, d = n.previousSibling; d;) d.nodeType !== Node.DOCUMENT_TYPE_NODE && d.nodeName === n.nodeName && i++, d = d.previousSibling; | ||||
|             for (d = n.nextSibling; d;) { | ||||
|                 if (d.nodeName === n.nodeName) { | ||||
|                     r = !0; | ||||
|                     break | ||||
|                 } | ||||
|                 d = d.nextSibling | ||||
|     var n = e; | ||||
|     if (n && n.id) return '//*[@id="' + n.id + '"]'; | ||||
|     for (var o = []; n && Node.ELEMENT_NODE === n.nodeType;) { | ||||
|         for (var i = 0, r = !1, d = n.previousSibling; d;) d.nodeType !== Node.DOCUMENT_TYPE_NODE && d.nodeName === n.nodeName && i++, d = d.previousSibling; | ||||
|         for (d = n.nextSibling; d;) { | ||||
|             if (d.nodeName === n.nodeName) { | ||||
|                 r = !0; | ||||
|                 break | ||||
|             } | ||||
|             o.push((n.prefix ? n.prefix + ":" : "") + n.localName + (i || r ? "[" + (i + 1) + "]" : "")), n = n.parentNode | ||||
|             d = d.nextSibling | ||||
|         } | ||||
|         return o.length ? "/" + o.reverse().join("/") : "" | ||||
|         o.push((n.prefix ? n.prefix + ":" : "") + n.localName + (i || r ? "[" + (i + 1) + "]" : "")), n = n.parentNode | ||||
|     } | ||||
|     return o.length ? "/" + o.reverse().join("/") : "" | ||||
| } | ||||
| 
 | ||||
| const findUpTag = (el) => { | ||||
|     let r = el | ||||
| @@ -59,14 +58,14 @@ const findUpTag = (el) => { | ||||
| 
 | ||||
|     // Strategy 2: Keep going up until we hit an ID tag, imagine it's like  #list-widget div h4
 | ||||
|     while (r.parentNode) { | ||||
|         if (depth == 5) { | ||||
|         if (depth === 5) { | ||||
|             break; | ||||
|         } | ||||
|         if ('' !== r.id) { | ||||
|             chained_css.unshift("#" + CSS.escape(r.id)); | ||||
|             final_selector = chained_css.join(' > '); | ||||
|             // Be sure theres only one, some sites have multiples of the same ID tag :-(
 | ||||
|             if (window.document.querySelectorAll(final_selector).length == 1) { | ||||
|             if (window.document.querySelectorAll(final_selector).length === 1) { | ||||
|                 return final_selector; | ||||
|             } | ||||
|             return null; | ||||
| @@ -82,30 +81,60 @@ const findUpTag = (el) => { | ||||
| 
 | ||||
| // @todo - if it's SVG or IMG, go into image diff mode
 | ||||
| // %ELEMENTS% replaced at injection time because different interfaces use it with different settings
 | ||||
| var elements = window.document.querySelectorAll("%ELEMENTS%"); | ||||
| 
 | ||||
| var size_pos = []; | ||||
| // after page fetch, inject this JS
 | ||||
| // build a map of all elements and their positions (maybe that only include text?)
 | ||||
| var bbox; | ||||
| for (var i = 0; i < elements.length; i++) { | ||||
|     bbox = elements[i].getBoundingClientRect(); | ||||
| console.log("Scanning %ELEMENTS%"); | ||||
| 
 | ||||
|     // Exclude items that are not interactable or visible
 | ||||
|     if(elements[i].style.opacity === "0") { | ||||
|         continue | ||||
| function collectVisibleElements(parent, visibleElements) { | ||||
|     if (!parent) return; // Base case: if parent is null or undefined, return
 | ||||
| 
 | ||||
| 
 | ||||
|     // Add the parent itself to the visible elements array if it's of the specified types
 | ||||
|     const tagName = parent.tagName.toLowerCase(); | ||||
|     if ("%ELEMENTS%".split(',').includes(tagName)) { | ||||
|         visibleElements.push(parent); | ||||
|     } | ||||
|     if(elements[i].style.display === "none" || elements[i].style.pointerEvents === "none" ) { | ||||
|         continue | ||||
| 
 | ||||
|     // Iterate over the parent's children
 | ||||
|     const children = parent.children; | ||||
|     for (let i = 0; i < children.length; i++) { | ||||
|         const child = children[i]; | ||||
|         if ( | ||||
|             child.nodeType === Node.ELEMENT_NODE && | ||||
|             window.getComputedStyle(child).display !== 'none' && | ||||
|             window.getComputedStyle(child).visibility !== 'hidden' && | ||||
|             child.offsetWidth >= 0 && | ||||
|             child.offsetHeight >= 0 && | ||||
|             window.getComputedStyle(child).contentVisibility !== 'hidden' | ||||
|         ) { | ||||
|             // If the child is an element and is visible, recursively collect visible elements
 | ||||
|             collectVisibleElements(child, visibleElements); | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| // Create an array to hold the visible elements
 | ||||
| const visibleElementsArray = []; | ||||
| 
 | ||||
| // Call collectVisibleElements with the starting parent element
 | ||||
| collectVisibleElements(document.body, visibleElementsArray); | ||||
| 
 | ||||
| 
 | ||||
| visibleElementsArray.forEach(function (element) { | ||||
| 
 | ||||
|     bbox = element.getBoundingClientRect(); | ||||
| 
 | ||||
|     // Skip really small ones, and where width or height ==0
 | ||||
|     if (bbox['width'] * bbox['height'] < 100) { | ||||
|         continue; | ||||
|     if (bbox['width'] * bbox['height'] < 10) { | ||||
|         return | ||||
|     } | ||||
| 
 | ||||
|     // Don't include elements that are offset from canvas
 | ||||
|     if (bbox['top']+scroll_y < 0 || bbox['left'] < 0) { | ||||
|         continue; | ||||
|     if (bbox['top'] + scroll_y < 0 || bbox['left'] < 0) { | ||||
|         return | ||||
|     } | ||||
| 
 | ||||
|     // @todo the getXpath kind of sucks, it doesnt know when there is for example just one ID sometimes
 | ||||
| @@ -114,46 +143,41 @@ for (var i = 0; i < elements.length; i++) { | ||||
| 
 | ||||
|     // 1st primitive - if it has class, try joining it all and select, if theres only one.. well thats us.
 | ||||
|     xpath_result = false; | ||||
| 
 | ||||
|     try { | ||||
|         var d = findUpTag(elements[i]); | ||||
|         var d = findUpTag(element); | ||||
|         if (d) { | ||||
|             xpath_result = d; | ||||
|         } | ||||
|     } catch (e) { | ||||
|         console.log(e); | ||||
|     } | ||||
| 
 | ||||
|     // You could swap it and default to getXpath and then try the smarter one
 | ||||
|     // default back to the less intelligent one
 | ||||
|     if (!xpath_result) { | ||||
|         try { | ||||
|             // I've seen on FB and eBay that this doesnt work
 | ||||
|             // ReferenceError: getXPath is not defined at eval (eval at evaluate (:152:29), <anonymous>:67:20) at UtilityScript.evaluate (<anonymous>:159:18) at UtilityScript.<anonymous> (<anonymous>:1:44)
 | ||||
|             xpath_result = getxpath(elements[i]); | ||||
|             xpath_result = getxpath(element); | ||||
|         } catch (e) { | ||||
|             console.log(e); | ||||
|             continue; | ||||
|             return | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     if (window.getComputedStyle(elements[i]).visibility === "hidden") { | ||||
|         continue; | ||||
|     } | ||||
| 
 | ||||
|     // @todo Possible to ONLY list where it's clickable to save JSON xfer size
 | ||||
|     size_pos.push({ | ||||
|         xpath: xpath_result, | ||||
|         width: Math.round(bbox['width']), | ||||
|         height: Math.round(bbox['height']), | ||||
|         left: Math.floor(bbox['left']), | ||||
|         top: Math.floor(bbox['top'])+scroll_y, | ||||
|         tagName: (elements[i].tagName) ? elements[i].tagName.toLowerCase() : '', | ||||
|         tagtype: (elements[i].tagName == 'INPUT' && elements[i].type) ? elements[i].type.toLowerCase() : '', | ||||
|         isClickable: (elements[i].onclick) || window.getComputedStyle(elements[i]).cursor == "pointer" | ||||
|         top: Math.floor(bbox['top']) + scroll_y, | ||||
|         tagName: (element.tagName) ? element.tagName.toLowerCase() : '', | ||||
|         tagtype: (element.tagName.toLowerCase() === 'input' && element.type) ? element.type.toLowerCase() : '', | ||||
|         isClickable: window.getComputedStyle(element).cursor == "pointer" | ||||
|     }); | ||||
| 
 | ||||
| } | ||||
| }); | ||||
| 
 | ||||
| 
 | ||||
| // Inject the current one set in the include_filters, which may be a CSS rule
 | ||||
| // used for displaying the current one in VisualSelector, where its not one we generated.
 | ||||
| @@ -170,20 +194,33 @@ if (include_filters.length) { | ||||
| 
 | ||||
|         try { | ||||
|             // is it xpath?
 | ||||
|             if (f.startsWith('/') || f.startsWith('xpath:')) { | ||||
|                 q = document.evaluate(f.replace('xpath:', ''), document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue; | ||||
|             if (f.startsWith('/') || f.startsWith('xpath')) { | ||||
|                 var qry_f = f.replace(/xpath(:|\d:)/, '') | ||||
|                 console.log("[xpath] Scanning for included filter " + qry_f) | ||||
|                 q = document.evaluate(qry_f, document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue; | ||||
|             } else { | ||||
|                 console.log("[css] Scanning for included filter " + f) | ||||
|                 q = document.querySelector(f); | ||||
|             } | ||||
|         } catch (e) { | ||||
|             // Maybe catch DOMException and alert?
 | ||||
|             console.log("xpath_element_scraper: Exception selecting element from filter "+f); | ||||
|             console.log("xpath_element_scraper: Exception selecting element from filter " + f); | ||||
|             console.log(e); | ||||
|         } | ||||
| 
 | ||||
|         if (q) { | ||||
|             // Try to resolve //something/text() back to its /something so we can atleast get the bounding box
 | ||||
|             try { | ||||
|                 if (typeof q.nodeName == 'string' && q.nodeName === '#text') { | ||||
|                     q = q.parentElement | ||||
|                 } | ||||
|             } catch (e) { | ||||
|                 console.log(e) | ||||
|                 console.log("xpath_element_scraper: #text resolver") | ||||
|             } | ||||
| 
 | ||||
|             // #1231 - IN the case XPath attribute filter is applied, we will have to traverse up and find the element.
 | ||||
|             if (q.hasOwnProperty('getBoundingClientRect')) { | ||||
|             if (typeof q.getBoundingClientRect == 'function') { | ||||
|                 bbox = q.getBoundingClientRect(); | ||||
|                 console.log("xpath_element_scraper: Got filter element, scroll from top was " + scroll_y) | ||||
|             } else { | ||||
| @@ -192,12 +229,13 @@ if (include_filters.length) { | ||||
|                     bbox = q.ownerElement.getBoundingClientRect(); | ||||
|                     console.log("xpath_element_scraper: Got filter by ownerElement element, scroll from top was " + scroll_y) | ||||
|                 } catch (e) { | ||||
|                     console.log("xpath_element_scraper: error looking up ownerElement") | ||||
|                     console.log(e) | ||||
|                     console.log("xpath_element_scraper: error looking up q.ownerElement") | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|          | ||||
|         if(!q) { | ||||
| 
 | ||||
|         if (!q) { | ||||
|             console.log("xpath_element_scraper: filter element " + f + " was not found"); | ||||
|         } | ||||
| 
 | ||||
| @@ -207,7 +245,7 @@ if (include_filters.length) { | ||||
|                 width: parseInt(bbox['width']), | ||||
|                 height: parseInt(bbox['height']), | ||||
|                 left: parseInt(bbox['left']), | ||||
|                 top: parseInt(bbox['top'])+scroll_y | ||||
|                 top: parseInt(bbox['top']) + scroll_y | ||||
|             }); | ||||
|         } | ||||
|     } | ||||
| @@ -215,7 +253,7 @@ if (include_filters.length) { | ||||
| 
 | ||||
| // Sort the elements so we find the smallest one first, in other words, we find the smallest one matching in that area
 | ||||
| // so that we dont select the wrapping element by mistake and be unable to select what we want
 | ||||
| size_pos.sort((a, b) => (a.width*a.height > b.width*b.height) ? 1 : -1) | ||||
| size_pos.sort((a, b) => (a.width * a.height > b.width * b.height) ? 1 : -1) | ||||
| 
 | ||||
| // Window.width required for proper scaling in the frontend
 | ||||
| return {'size_pos': size_pos, 'browser_width': window.innerWidth}; | ||||
							
								
								
									
										119
									
								
								changedetectionio/content_fetchers/webdriver_selenium.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,119 @@ | ||||
| import os | ||||
| import time | ||||
|  | ||||
| from loguru import logger | ||||
| from changedetectionio.content_fetchers.base import Fetcher | ||||
|  | ||||
class fetcher(Fetcher):
    """Content fetcher that drives a remote Chrome browser over the Selenium
    WebDriver protocol (typically a selenium/standalone-chrome container
    reachable at the WEBDRIVER_URL endpoint)."""

    if os.getenv("WEBDRIVER_URL"):
        fetcher_description = "WebDriver Chrome/Javascript via '{}'".format(os.getenv("WEBDRIVER_URL"))
    else:
        fetcher_description = "WebDriver Chrome/Javascript"

    # Selenium proxy capability keys; each may be supplied via an ENV var
    # prefixed with "webdriver_", so it is for example "webdriver_sslProxy"
    selenium_proxy_settings_mappings = ['proxyType', 'ftpProxy', 'httpProxy', 'noProxy',
                                        'proxyAutoconfigUrl', 'sslProxy', 'autodetect',
                                        'socksProxy', 'socksVersion', 'socksUsername', 'socksPassword']
    proxy = None

    def __init__(self, proxy_override=None, custom_browser_connection_url=None):
        """
        :param proxy_override: optional proxy URL that takes precedence over any
                               ENV-configured proxy, on a per-request basis.
        :param custom_browser_connection_url: optional WebDriver endpoint that
                               overrides the WEBDRIVER_URL environment variable.
        """
        super().__init__()
        from selenium.webdriver.common.proxy import Proxy as SeleniumProxy

        # .strip('"') is going to save someone a lot of time when they accidentally wrap the env value
        if not custom_browser_connection_url:
            self.browser_connection_url = os.getenv("WEBDRIVER_URL", 'http://browser-chrome:4444/wd/hub').strip('"')
        else:
            self.browser_connection_is_custom = True
            self.browser_connection_url = custom_browser_connection_url

        # If any proxy settings are enabled, then we should setup the proxy object
        proxy_args = {}
        for k in self.selenium_proxy_settings_mappings:
            v = os.getenv('webdriver_' + k, False)
            if v:
                proxy_args[k] = v.strip('"')

        # Map back standard HTTP_PROXY / HTTPS_PROXY to WebDriver httpProxy/sslProxy.
        # BUG FIX: proxy_args keys are stored WITHOUT the 'webdriver_' prefix, so the
        # previous .get('webdriver_httpProxy') / .get('webdriver_sslProxy') checks could
        # never match; and 'httpsProxy' is not a Selenium proxy capability — the correct
        # key for HTTPS traffic is 'sslProxy'.
        if not proxy_args.get('httpProxy') and self.system_http_proxy:
            proxy_args['httpProxy'] = self.system_http_proxy
        if not proxy_args.get('sslProxy') and self.system_https_proxy:
            proxy_args['sslProxy'] = self.system_https_proxy

        # Allows override the proxy on a per-request basis
        if proxy_override is not None:
            proxy_args['httpProxy'] = proxy_override

        if proxy_args:
            self.proxy = SeleniumProxy(raw=proxy_args)

    def run(self,
            url,
            timeout,
            request_headers,
            request_body,
            request_method,
            ignore_status_codes=False,
            current_include_filters=None,
            is_binary=False):
        """Navigate the remote browser to *url* and populate self.content,
        self.headers, self.screenshot and self.status_code.

        request_body / request_method are unused for now, until some magic
        in the future happens — WebDriver always performs a GET-style navigation.
        """
        from selenium import webdriver
        from selenium.webdriver.chrome.options import Options as ChromeOptions
        from selenium.common.exceptions import WebDriverException

        options = ChromeOptions()
        if self.proxy:
            options.proxy = self.proxy

        self.driver = webdriver.Remote(
            command_executor=self.browser_connection_url,
            options=options)

        try:
            self.driver.get(url)
        except WebDriverException:
            # Be sure we close the session window before propagating the error
            self.quit()
            raise

        # Hoisted: the same configured delay drives both implicit waits and the
        # final content-settle sleep below
        delay = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5))

        self.driver.set_window_size(1280, 1024)
        self.driver.implicitly_wait(delay)

        if self.webdriver_js_execute_code is not None:
            self.driver.execute_script(self.webdriver_js_execute_code)
            # Selenium doesn't automatically wait for actions as well as Playwright, so wait again
            self.driver.implicitly_wait(delay)

        # @todo - how to check this? is it possible?
        self.status_code = 200
        # @todo somehow we should try to get this working for WebDriver
        # raise EmptyReply(url=url, status_code=r.status_code)

        # @todo - dom wait loaded?
        time.sleep(delay + self.render_extract_delay)
        self.content = self.driver.page_source
        self.headers = {}

        self.screenshot = self.driver.get_screenshot_as_png()

    def is_ready(self):
        """Does the connection to the webdriver work? Run a test connection."""
        from selenium import webdriver
        from selenium.webdriver.chrome.options import Options as ChromeOptions

        # BUG FIX: previously connected to self.command_executor, an attribute that is
        # never set on this class — the configured endpoint is browser_connection_url.
        self.driver = webdriver.Remote(
            command_executor=self.browser_connection_url,
            options=ChromeOptions())

        # driver.quit() seems to cause better exceptions
        self.quit()
        return True

    def quit(self):
        """Shut down the remote browser session, logging (not raising) any error."""
        # getattr() guards against quit() being called before run()/is_ready()
        # ever assigned self.driver (plain self.driver would raise AttributeError)
        if getattr(self, 'driver', None):
            try:
                self.driver.quit()
            except Exception as e:
                logger.debug(f"Content Fetcher > Exception in chrome shutdown/quit {str(e)}")
							
								
								
									
										1762
									
								
								changedetectionio/flask_app.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -27,7 +27,7 @@ from validators.url import url as url_validator | ||||
| # each select <option data-enabled="enabled-0-0" | ||||
| from changedetectionio.blueprint.browser_steps.browser_steps import browser_step_ui_config | ||||
|  | ||||
| from changedetectionio import content_fetcher, html_tools | ||||
| from changedetectionio import html_tools, content_fetchers | ||||
|  | ||||
| from changedetectionio.notification import ( | ||||
|     valid_notification_formats, | ||||
| @@ -43,6 +43,7 @@ valid_method = { | ||||
|     'PUT', | ||||
|     'PATCH', | ||||
|     'DELETE', | ||||
|     'OPTIONS', | ||||
| } | ||||
|  | ||||
| default_method = 'GET' | ||||
| @@ -166,33 +167,31 @@ class ValidateContentFetcherIsReady(object): | ||||
|         self.message = message | ||||
|  | ||||
|     def __call__(self, form, field): | ||||
|         import urllib3.exceptions | ||||
|         from changedetectionio import content_fetcher | ||||
|         return | ||||
|  | ||||
| # AttributeError: module 'changedetectionio.content_fetcher' has no attribute 'extra_browser_unlocked<>ASDF213r123r' | ||||
|         # Better would be a radiohandler that keeps a reference to each class | ||||
|         if field.data is not None and field.data != 'system': | ||||
|             klass = getattr(content_fetcher, field.data) | ||||
|             some_object = klass() | ||||
|             try: | ||||
|                 ready = some_object.is_ready() | ||||
|  | ||||
|             except urllib3.exceptions.MaxRetryError as e: | ||||
|                 driver_url = some_object.command_executor | ||||
|                 message = field.gettext('Content fetcher \'%s\' did not respond.' % (field.data)) | ||||
|                 message += '<br>' + field.gettext( | ||||
|                     'Be sure that the selenium/webdriver runner is running and accessible via network from this container/host.') | ||||
|                 message += '<br>' + field.gettext('Did you follow the instructions in the wiki?') | ||||
|                 message += '<br><br>' + field.gettext('WebDriver Host: %s' % (driver_url)) | ||||
|                 message += '<br><a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">Go here for more information</a>' | ||||
|                 message += '<br>'+field.gettext('Content fetcher did not respond properly, unable to use it.\n %s' % (str(e))) | ||||
|  | ||||
|                 raise ValidationError(message) | ||||
|  | ||||
|             except Exception as e: | ||||
|                 message = field.gettext('Content fetcher \'%s\' did not respond properly, unable to use it.\n %s') | ||||
|                 raise ValidationError(message % (field.data, e)) | ||||
|         # if field.data is not None and field.data != 'system': | ||||
|         #     klass = getattr(content_fetcher, field.data) | ||||
|         #     some_object = klass() | ||||
|         #     try: | ||||
|         #         ready = some_object.is_ready() | ||||
|         # | ||||
|         #     except urllib3.exceptions.MaxRetryError as e: | ||||
|         #         driver_url = some_object.command_executor | ||||
|         #         message = field.gettext('Content fetcher \'%s\' did not respond.' % (field.data)) | ||||
|         #         message += '<br>' + field.gettext( | ||||
|         #             'Be sure that the selenium/webdriver runner is running and accessible via network from this container/host.') | ||||
|         #         message += '<br>' + field.gettext('Did you follow the instructions in the wiki?') | ||||
|         #         message += '<br><br>' + field.gettext('WebDriver Host: %s' % (driver_url)) | ||||
|         #         message += '<br><a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">Go here for more information</a>' | ||||
|         #         message += '<br>'+field.gettext('Content fetcher did not respond properly, unable to use it.\n %s' % (str(e))) | ||||
|         # | ||||
|         #         raise ValidationError(message) | ||||
|         # | ||||
|         #     except Exception as e: | ||||
|         #         message = field.gettext('Content fetcher \'%s\' did not respond properly, unable to use it.\n %s') | ||||
|         #         raise ValidationError(message % (field.data, e)) | ||||
|  | ||||
|  | ||||
| class ValidateNotificationBodyAndTitleWhenURLisSet(object): | ||||
| @@ -351,7 +350,7 @@ class ValidateCSSJSONXPATHInput(object): | ||||
|                     raise ValidationError("XPath not permitted in this field!") | ||||
|                 from lxml import etree, html | ||||
|                 tree = html.fromstring("<html></html>") | ||||
|                 line = line.replace('xpath1:', '') | ||||
|                 line = re.sub(r'^xpath1:', '', line) | ||||
|  | ||||
|                 try: | ||||
|                     tree.xpath(line.strip()) | ||||
| @@ -420,7 +419,7 @@ class commonSettingsForm(Form): | ||||
|     notification_title = StringField('Notification Title', default='ChangeDetection.io Notification - {{ watch_url }}', validators=[validators.Optional(), ValidateJinja2Template()]) | ||||
|     notification_body = TextAreaField('Notification Body', default='{{ watch_url }} had a change.', validators=[validators.Optional(), ValidateJinja2Template()]) | ||||
|     notification_format = SelectField('Notification format', choices=valid_notification_formats.keys()) | ||||
|     fetch_backend = RadioField(u'Fetch Method', choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()]) | ||||
|     fetch_backend = RadioField(u'Fetch Method', choices=content_fetchers.available_fetchers(), validators=[ValidateContentFetcherIsReady()]) | ||||
|     extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False) | ||||
|     webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, | ||||
|                                                                                                                                     message="Should contain one or more seconds")]) | ||||
| @@ -464,6 +463,7 @@ class watchForm(commonSettingsForm): | ||||
|     method = SelectField('Request method', choices=valid_method, default=default_method) | ||||
|     ignore_status_codes = BooleanField('Ignore status codes (process non-2xx status codes as normal)', default=False) | ||||
|     check_unique_lines = BooleanField('Only trigger when unique lines appear', default=False) | ||||
|     sort_text_alphabetically =  BooleanField('Sort text alphabetically', default=False) | ||||
|  | ||||
|     filter_text_added = BooleanField('Added lines', default=True) | ||||
|     filter_text_replaced = BooleanField('Replaced/changed lines', default=True) | ||||
| @@ -550,7 +550,7 @@ class globalSettingsApplicationForm(commonSettingsForm): | ||||
|                            render_kw={"placeholder": os.getenv('BASE_URL', 'Not set')} | ||||
|                            ) | ||||
|     empty_pages_are_a_change =  BooleanField('Treat empty pages as a change?', default=False) | ||||
|     fetch_backend = RadioField('Fetch Method', default="html_requests", choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()]) | ||||
|     fetch_backend = RadioField('Fetch Method', default="html_requests", choices=content_fetchers.available_fetchers(), validators=[ValidateContentFetcherIsReady()]) | ||||
|     global_ignore_text = StringListField('Ignore Text', [ValidateListRegex()]) | ||||
|     global_subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)]) | ||||
|     ignore_whitespace = BooleanField('Ignore whitespace') | ||||
|   | ||||
| @@ -2,6 +2,7 @@ from abc import ABC, abstractmethod | ||||
| import time | ||||
| import validators | ||||
| from wtforms import ValidationError | ||||
| from loguru import logger | ||||
|  | ||||
| from changedetectionio.forms import validate_url | ||||
|  | ||||
| @@ -56,7 +57,7 @@ class import_url_list(Importer): | ||||
|  | ||||
|             # Flask wtform validators wont work with basic auth, use validators package | ||||
|             # Up to 5000 per batch so we dont flood the server | ||||
|             # @todo validators.url failed on local hostnames (such as referring to ourself when using browserless) | ||||
|             # @todo validators.url will fail when you add your own IP etc | ||||
|             if len(url) and 'http' in url.lower() and good < 5000: | ||||
|                 extras = None | ||||
|                 if processor: | ||||
| @@ -195,7 +196,7 @@ class import_xlsx_wachete(Importer): | ||||
|                     try: | ||||
|                         validate_url(data.get('url')) | ||||
|                     except ValidationError as e: | ||||
|                         print(">> import URL error", data.get('url'), str(e)) | ||||
|                         logger.error(f">> Import URL error {data.get('url')} {str(e)}") | ||||
|                         flash(f"Error processing row number {row_id}, URL value was incorrect, row was skipped.", 'error') | ||||
|                         # Don't bother processing anything else on this row | ||||
|                         continue | ||||
| @@ -209,7 +210,7 @@ class import_xlsx_wachete(Importer): | ||||
|                         self.new_uuids.append(new_uuid) | ||||
|                         good += 1 | ||||
|             except Exception as e: | ||||
|                 print(e) | ||||
|                 logger.error(e) | ||||
|                 flash(f"Error processing row number {row_id}, check all cell data types are correct, row was skipped.", 'error') | ||||
|             else: | ||||
|                 row_id += 1 | ||||
| @@ -264,7 +265,7 @@ class import_xlsx_custom(Importer): | ||||
|                         try: | ||||
|                             validate_url(url) | ||||
|                         except ValidationError as e: | ||||
|                             print(">> Import URL error", url, str(e)) | ||||
|                             logger.error(f">> Import URL error {url} {str(e)}") | ||||
|                             flash(f"Error processing row number {row_i}, URL value was incorrect, row was skipped.", 'error') | ||||
|                             # Don't bother processing anything else on this row | ||||
|                             url = None | ||||
| @@ -293,7 +294,7 @@ class import_xlsx_custom(Importer): | ||||
|                         self.new_uuids.append(new_uuid) | ||||
|                         good += 1 | ||||
|         except Exception as e: | ||||
|             print(e) | ||||
|             logger.error(e) | ||||
|             flash(f"Error processing row number {row_i}, check all cell data types are correct, row was skipped.", 'error') | ||||
|         else: | ||||
|             row_i += 1 | ||||
|   | ||||
| @@ -1,10 +1,10 @@ | ||||
| from distutils.util import strtobool | ||||
| import logging | ||||
| import os | ||||
| import re | ||||
| import time | ||||
| import uuid | ||||
| from pathlib import Path | ||||
| from loguru import logger | ||||
|  | ||||
| # Allowable protocols, protects against javascript: etc | ||||
| # file:// is further checked by ALLOW_FILE_URI | ||||
| @@ -38,12 +38,14 @@ base_config = { | ||||
|     'track_ldjson_price_data': None, | ||||
|     'headers': {},  # Extra headers to send | ||||
|     'ignore_text': [],  # List of text to ignore when calculating the comparison checksum | ||||
|     'in_stock' : None, | ||||
|     'in_stock_only' : True, # Only trigger change on going to instock from out-of-stock | ||||
|     'include_filters': [], | ||||
|     'last_checked': 0, | ||||
|     'last_error': False, | ||||
|     'last_viewed': 0,  # history key value of the last viewed via the [diff] link | ||||
|     'method': 'GET', | ||||
|     'notification_alert_count': 0, | ||||
|     # Custom notification content | ||||
|     'notification_body': None, | ||||
|     'notification_format': default_notification_format_for_watch, | ||||
| @@ -55,6 +57,8 @@ base_config = { | ||||
|     'previous_md5': False, | ||||
|     'previous_md5_before_filters': False,  # Used for skipping changedetection entirely | ||||
|     'proxy': None,  # Preferred proxy connection | ||||
|     'remote_server_reply': None, # From 'server' reply header | ||||
|     'sort_text_alphabetically': False, | ||||
|     'subtractive_selectors': [], | ||||
|     'tag': '', # Old system of text name for a tag, to be removed | ||||
|     'tags': [], # list of UUIDs to App.Tags | ||||
| @@ -113,14 +117,15 @@ class model(dict): | ||||
|  | ||||
|     @property | ||||
|     def viewed(self): | ||||
|         if int(self['last_viewed']) >= int(self.newest_history_key) : | ||||
|         # Don't return viewed when last_viewed is 0 and newest_key is 0 | ||||
|         if int(self['last_viewed']) and int(self['last_viewed']) >= int(self.newest_history_key) : | ||||
|             return True | ||||
|  | ||||
|         return False | ||||
|  | ||||
|     def ensure_data_dir_exists(self): | ||||
|         if not os.path.isdir(self.watch_data_dir): | ||||
|             print ("> Creating data dir {}".format(self.watch_data_dir)) | ||||
|             logger.debug(f"> Creating data dir {self.watch_data_dir}") | ||||
|             os.mkdir(self.watch_data_dir) | ||||
|  | ||||
|     @property | ||||
| @@ -209,7 +214,7 @@ class model(dict): | ||||
|         # Read the history file as a dict | ||||
|         fname = os.path.join(self.watch_data_dir, "history.txt") | ||||
|         if os.path.isfile(fname): | ||||
|             logging.debug("Reading history index " + str(time.time())) | ||||
|             logger.debug(f"Reading watch history index for {self.get('uuid')}") | ||||
|             with open(fname, "r") as f: | ||||
|                 for i in f.readlines(): | ||||
|                     if ',' in i: | ||||
| @@ -244,10 +249,10 @@ class model(dict): | ||||
|     @property | ||||
|     def has_browser_steps(self): | ||||
|         has_browser_steps = self.get('browser_steps') and list(filter( | ||||
|                 lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'), | ||||
|                 self.get('browser_steps'))) | ||||
|             lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'), | ||||
|             self.get('browser_steps'))) | ||||
|  | ||||
|         return  has_browser_steps | ||||
|         return has_browser_steps | ||||
|  | ||||
|     # Returns the newest key, but if theres only 1 record, then it's counted as not being new, so return 0. | ||||
|     @property | ||||
| @@ -262,6 +267,38 @@ class model(dict): | ||||
|         bump = self.history | ||||
|         return self.__newest_history_key | ||||
|  | ||||
|     # Given an arbitrary timestamp, find the closest next key | ||||
|     # For example, last_viewed = 1000 so it should return the next 1001 timestamp | ||||
|     # | ||||
|     # used for the [diff] button so it can preset a smarter from_version | ||||
|     @property | ||||
|     def get_next_snapshot_key_to_last_viewed(self): | ||||
|  | ||||
|         """Unfortunately for now timestamp is stored as string key""" | ||||
|         keys = list(self.history.keys()) | ||||
|         if not keys: | ||||
|             return None | ||||
|  | ||||
|         last_viewed = int(self.get('last_viewed')) | ||||
|         prev_k = keys[0] | ||||
|         sorted_keys = sorted(keys, key=lambda x: int(x)) | ||||
|         sorted_keys.reverse() | ||||
|  | ||||
|         # When the 'last viewed' timestamp is greater than the newest snapshot, return second last | ||||
|         if last_viewed > int(sorted_keys[0]): | ||||
|             return sorted_keys[1] | ||||
|  | ||||
|         for k in sorted_keys: | ||||
|             if int(k) < last_viewed: | ||||
|                 if prev_k == sorted_keys[0]: | ||||
|                     # Return the second last one so we don't recommend comparing the same version against itself | ||||
|                     return sorted_keys[1] | ||||
|  | ||||
|                 return prev_k | ||||
|             prev_k = k | ||||
|  | ||||
|         return keys[0] | ||||
|  | ||||
|     def get_history_snapshot(self, timestamp): | ||||
|         import brotli | ||||
|         filepath = self.history[timestamp] | ||||
|   | ||||
| @@ -1,7 +1,9 @@ | ||||
| import apprise | ||||
| import time | ||||
| from jinja2 import Environment, BaseLoader | ||||
| from apprise import NotifyFormat | ||||
| import json | ||||
| from loguru import logger | ||||
|  | ||||
| valid_tokens = { | ||||
|     'base_url': '', | ||||
| @@ -46,6 +48,9 @@ from apprise.decorators import notify | ||||
| @notify(on="puts") | ||||
| def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs): | ||||
|     import requests | ||||
|     from apprise.utils import parse_url as apprise_parse_url | ||||
|     from apprise.URLBase import URLBase | ||||
|  | ||||
|     url = kwargs['meta'].get('url') | ||||
|  | ||||
|     if url.startswith('post'): | ||||
| @@ -68,27 +73,59 @@ def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs): | ||||
|     url = url.replace('delete://', 'http://') | ||||
|     url = url.replace('deletes://', 'https://') | ||||
|  | ||||
|     # Try to auto-guess if it's JSON | ||||
|     headers = {} | ||||
|     params = {} | ||||
|     auth = None | ||||
|  | ||||
|     # Convert /foobar?+some-header=hello to proper header dictionary | ||||
|     results = apprise_parse_url(url) | ||||
|     if results: | ||||
|         # Add our headers that the user can potentially over-ride if they wish | ||||
|         # to to our returned result set and tidy entries by unquoting them | ||||
|         headers = {URLBase.unquote(x): URLBase.unquote(y) | ||||
|                    for x, y in results['qsd+'].items()} | ||||
|  | ||||
|         # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation | ||||
|         # In Apprise, it relies on prefixing each request arg with "-", because it uses say &method=update as a flag for apprise | ||||
|         # but here we are making straight requests, so we need to convert this against apprise's logic | ||||
|         for k, v in results['qsd'].items(): | ||||
|             if not k.strip('+-') in results['qsd+'].keys(): | ||||
|                 params[URLBase.unquote(k)] = URLBase.unquote(v) | ||||
|  | ||||
|         # Determine Authentication | ||||
|         auth = '' | ||||
|         if results.get('user') and results.get('password'): | ||||
|             auth = (URLBase.unquote(results.get('user')), URLBase.unquote(results.get('user'))) | ||||
|         elif results.get('user'): | ||||
|             auth = (URLBase.unquote(results.get('user'))) | ||||
|  | ||||
|     # Try to auto-guess if it's JSON | ||||
|     try: | ||||
|         json.loads(body) | ||||
|         headers = {'Content-Type': 'application/json; charset=utf-8'} | ||||
|         headers['Content-Type'] = 'application/json; charset=utf-8' | ||||
|     except ValueError as e: | ||||
|         pass | ||||
|  | ||||
|  | ||||
|     r(url, headers=headers, data=body) | ||||
|     r(results.get('url'), | ||||
|       auth=auth, | ||||
|       data=body, | ||||
|       headers=headers, | ||||
|       params=params | ||||
|       ) | ||||
|  | ||||
|  | ||||
| def process_notification(n_object, datastore): | ||||
|  | ||||
|     now = time.time() | ||||
|     if n_object.get('notification_timestamp'): | ||||
|         logger.trace(f"Time since queued {now-n_object['notification_timestamp']:.3f}s") | ||||
|     # Insert variables into the notification content | ||||
|     notification_parameters = create_notification_parameters(n_object, datastore) | ||||
|  | ||||
|     # Get the notification body from datastore | ||||
|     jinja2_env = Environment(loader=BaseLoader) | ||||
|     n_body = jinja2_env.from_string(n_object.get('notification_body', default_notification_body)).render(**notification_parameters) | ||||
|     n_title = jinja2_env.from_string(n_object.get('notification_title', default_notification_title)).render(**notification_parameters) | ||||
|     n_body = jinja2_env.from_string(n_object.get('notification_body', '')).render(**notification_parameters) | ||||
|     n_title = jinja2_env.from_string(n_object.get('notification_title', '')).render(**notification_parameters) | ||||
|     n_format = valid_notification_formats.get( | ||||
|         n_object.get('notification_format', default_notification_format), | ||||
|         valid_notification_formats[default_notification_format], | ||||
| @@ -99,103 +136,114 @@ def process_notification(n_object, datastore): | ||||
|         # Initially text or whatever | ||||
|         n_format = datastore.data['settings']['application'].get('notification_format', valid_notification_formats[default_notification_format]) | ||||
|  | ||||
|     logger.trace(f"Complete notification body including Jinja and placeholders calculated in  {time.time() - now:.3f}s") | ||||
|  | ||||
|     # https://github.com/caronc/apprise/wiki/Development_LogCapture | ||||
|     # Anything higher than or equal to WARNING (which covers things like Connection errors) | ||||
|     # raise it as an exception | ||||
|     apobjs=[] | ||||
|     sent_objs=[] | ||||
|  | ||||
|     sent_objs = [] | ||||
|     from .apprise_asset import asset | ||||
|     for url in n_object['notification_urls']: | ||||
|         url = jinja2_env.from_string(url).render(**notification_parameters) | ||||
|         apobj = apprise.Apprise(debug=True, asset=asset) | ||||
|         url = url.strip() | ||||
|         if len(url): | ||||
|             print(">> Process Notification: AppRise notifying {}".format(url)) | ||||
|             with apprise.LogCapture(level=apprise.logging.DEBUG) as logs: | ||||
|                 # Re 323 - Limit discord length to their 2000 char limit total or it wont send. | ||||
|                 # Because different notifications may require different pre-processing, run each sequentially :( | ||||
|                 # 2000 bytes minus - | ||||
|                 #     200 bytes for the overhead of the _entire_ json payload, 200 bytes for {tts, wait, content} etc headers | ||||
|                 #     Length of URL - Incase they specify a longer custom avatar_url | ||||
|     apobj = apprise.Apprise(debug=True, asset=asset) | ||||
|  | ||||
|                 # So if no avatar_url is specified, add one so it can be correctly calculated into the total payload | ||||
|                 k = '?' if not '?' in url else '&' | ||||
|                 if not 'avatar_url' in url \ | ||||
|                         and not url.startswith('mail') \ | ||||
|                         and not url.startswith('post') \ | ||||
|                         and not url.startswith('get') \ | ||||
|                         and not url.startswith('delete') \ | ||||
|                         and not url.startswith('put'): | ||||
|                     url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png' | ||||
|     if not n_object.get('notification_urls'): | ||||
|         return None | ||||
|  | ||||
|                 if url.startswith('tgram://'): | ||||
|                     # Telegram only supports a limited subset of HTML, remove the '<br>' we place in. | ||||
|                     # re https://github.com/dgtlmoon/changedetection.io/issues/555 | ||||
|                     # @todo re-use an existing library we have already imported to strip all non-allowed tags | ||||
|                     n_body = n_body.replace('<br>', '\n') | ||||
|                     n_body = n_body.replace('</br>', '\n') | ||||
|                     # real limit is 4096, but minus some for extra metadata | ||||
|                     payload_max_size = 3600 | ||||
|                     body_limit = max(0, payload_max_size - len(n_title)) | ||||
|                     n_title = n_title[0:payload_max_size] | ||||
|                     n_body = n_body[0:body_limit] | ||||
|     with apprise.LogCapture(level=apprise.logging.DEBUG) as logs: | ||||
|         for url in n_object['notification_urls']: | ||||
|             url = url.strip() | ||||
|             if not url: | ||||
|                 logger.warning(f"Process Notification: skipping empty notification URL.") | ||||
|                 continue | ||||
|  | ||||
|                 elif url.startswith('discord://') or url.startswith('https://discordapp.com/api/webhooks') or url.startswith('https://discord.com/api'): | ||||
|                     # real limit is 2000, but minus some for extra metadata | ||||
|                     payload_max_size = 1700 | ||||
|                     body_limit = max(0, payload_max_size - len(n_title)) | ||||
|                     n_title = n_title[0:payload_max_size] | ||||
|                     n_body = n_body[0:body_limit] | ||||
|             logger.info(">> Process Notification: AppRise notifying {}".format(url)) | ||||
|             url = jinja2_env.from_string(url).render(**notification_parameters) | ||||
|  | ||||
|                 elif url.startswith('mailto'): | ||||
|                     # Apprise will default to HTML, so we need to override it | ||||
|                     # So that whats' generated in n_body is in line with what is going to be sent. | ||||
|                     # https://github.com/caronc/apprise/issues/633#issuecomment-1191449321 | ||||
|                     if not 'format=' in url and (n_format == 'Text' or n_format == 'Markdown'): | ||||
|                         prefix = '?' if not '?' in url else '&' | ||||
|                         # Apprise format is lowercase text https://github.com/caronc/apprise/issues/633 | ||||
|                         n_format = n_format.tolower() | ||||
|                         url = "{}{}format={}".format(url, prefix, n_format) | ||||
|                     # If n_format == HTML, then apprise email should default to text/html and we should be sending HTML only | ||||
|             # Re 323 - Limit discord length to their 2000 char limit total or it wont send. | ||||
|             # Because different notifications may require different pre-processing, run each sequentially :( | ||||
|             # 2000 bytes minus - | ||||
|             #     200 bytes for the overhead of the _entire_ json payload, 200 bytes for {tts, wait, content} etc headers | ||||
|             #     Length of URL - Incase they specify a longer custom avatar_url | ||||
|  | ||||
|                 apobj.add(url) | ||||
|             # So if no avatar_url is specified, add one so it can be correctly calculated into the total payload | ||||
|             k = '?' if not '?' in url else '&' | ||||
|             if not 'avatar_url' in url \ | ||||
|                     and not url.startswith('mail') \ | ||||
|                     and not url.startswith('post') \ | ||||
|                     and not url.startswith('get') \ | ||||
|                     and not url.startswith('delete') \ | ||||
|                     and not url.startswith('put'): | ||||
|                 url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png' | ||||
|  | ||||
|                 apobj.notify( | ||||
|                     title=n_title, | ||||
|                     body=n_body, | ||||
|                     body_format=n_format, | ||||
|                     # False is not an option for AppRise, must be type None | ||||
|                     attach=n_object.get('screenshot', None) | ||||
|                 ) | ||||
|             if url.startswith('tgram://'): | ||||
|                 # Telegram only supports a limit subset of HTML, remove the '<br>' we place in. | ||||
|                 # re https://github.com/dgtlmoon/changedetection.io/issues/555 | ||||
|                 # @todo re-use an existing library we have already imported to strip all non-allowed tags | ||||
|                 n_body = n_body.replace('<br>', '\n') | ||||
|                 n_body = n_body.replace('</br>', '\n') | ||||
|                 # real limit is 4096, but minus some for extra metadata | ||||
|                 payload_max_size = 3600 | ||||
|                 body_limit = max(0, payload_max_size - len(n_title)) | ||||
|                 n_title = n_title[0:payload_max_size] | ||||
|                 n_body = n_body[0:body_limit] | ||||
|  | ||||
|                 apobj.clear() | ||||
|             elif url.startswith('discord://') or url.startswith('https://discordapp.com/api/webhooks') or url.startswith( | ||||
|                     'https://discord.com/api'): | ||||
|                 # real limit is 2000, but minus some for extra metadata | ||||
|                 payload_max_size = 1700 | ||||
|                 body_limit = max(0, payload_max_size - len(n_title)) | ||||
|                 n_title = n_title[0:payload_max_size] | ||||
|                 n_body = n_body[0:body_limit] | ||||
|  | ||||
|                 # Incase it needs to exist in memory for a while after to process(?) | ||||
|                 apobjs.append(apobj) | ||||
|             elif url.startswith('mailto'): | ||||
|                 # Apprise will default to HTML, so we need to override it | ||||
|                 # So that whats' generated in n_body is in line with what is going to be sent. | ||||
|                 # https://github.com/caronc/apprise/issues/633#issuecomment-1191449321 | ||||
|                 if not 'format=' in url and (n_format == 'Text' or n_format == 'Markdown'): | ||||
|                     prefix = '?' if not '?' in url else '&' | ||||
|                     # Apprise format is lowercase text https://github.com/caronc/apprise/issues/633 | ||||
|                     n_format = n_format.lower() | ||||
|                     url = f"{url}{prefix}format={n_format}" | ||||
|                 # If n_format == HTML, then apprise email should default to text/html and we should be sending HTML only | ||||
|  | ||||
|                 # Returns empty string if nothing found, multi-line string otherwise | ||||
|                 log_value = logs.getvalue() | ||||
|                 if log_value and 'WARNING' in log_value or 'ERROR' in log_value: | ||||
|                     raise Exception(log_value) | ||||
|             apobj.add(url) | ||||
|  | ||||
|                 sent_objs.append({'title': n_title, | ||||
|                                   'body': n_body, | ||||
|                                   'url' : url, | ||||
|                                   'body_format': n_format}) | ||||
|             sent_objs.append({'title': n_title, | ||||
|                               'body': n_body, | ||||
|                               'url': url, | ||||
|                               'body_format': n_format}) | ||||
|  | ||||
|         # Blast off the notifications tht are set in .add() | ||||
|         apobj.notify( | ||||
|             title=n_title, | ||||
|             body=n_body, | ||||
|             body_format=n_format, | ||||
|             # False is not an option for AppRise, must be type None | ||||
|             attach=n_object.get('screenshot', None) | ||||
|         ) | ||||
|  | ||||
|         # Give apprise time to register an error | ||||
|         time.sleep(3) | ||||
|  | ||||
|         # Returns empty string if nothing found, multi-line string otherwise | ||||
|         log_value = logs.getvalue() | ||||
|  | ||||
|         if log_value and 'WARNING' in log_value or 'ERROR' in log_value: | ||||
|             raise Exception(log_value) | ||||
|  | ||||
|     # Return what was sent for better logging - after the for loop | ||||
|     return sent_objs | ||||
|  | ||||
|  | ||||
| # Notification title + body content parameters get created here. | ||||
| # ( Where we prepare the tokens in the notification to be replaced with actual values ) | ||||
| def create_notification_parameters(n_object, datastore): | ||||
|     from copy import deepcopy | ||||
|  | ||||
|     # in the case we send a test notification from the main settings, there is no UUID. | ||||
|     uuid = n_object['uuid'] if 'uuid' in n_object else '' | ||||
|  | ||||
|     if uuid != '': | ||||
|     if uuid: | ||||
|         watch_title = datastore.data['watching'][uuid].get('title', '') | ||||
|         tag_list = [] | ||||
|         tags = datastore.get_all_tags_for_watch(uuid) | ||||
| @@ -223,7 +271,7 @@ def create_notification_parameters(n_object, datastore): | ||||
|     tokens.update( | ||||
|         { | ||||
|             'base_url': base_url, | ||||
|             'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else '', | ||||
|             'current_snapshot': n_object.get('current_snapshot', ''), | ||||
|             'diff': n_object.get('diff', ''),  # Null default in the case we use a test | ||||
|             'diff_added': n_object.get('diff_added', ''),  # Null default in the case we use a test | ||||
|             'diff_full': n_object.get('diff_full', ''),  # Null default in the case we use a test | ||||
|   | ||||
| @@ -2,9 +2,9 @@ from abc import abstractmethod | ||||
| import os | ||||
| import hashlib | ||||
| import re | ||||
| from changedetectionio import content_fetcher | ||||
| from copy import deepcopy | ||||
| from distutils.util import strtobool | ||||
| from loguru import logger | ||||
|  | ||||
| class difference_detection_processor(): | ||||
|  | ||||
| @@ -43,33 +43,49 @@ class difference_detection_processor(): | ||||
|  | ||||
|         # In the case that the preferred fetcher was a browser config with custom connection URL.. | ||||
|         # @todo - on save watch, if its extra_browser_ then it should be obvious it will use playwright (like if its requests now..) | ||||
|         browser_connection_url = None | ||||
|         custom_browser_connection_url = None | ||||
|         if prefer_fetch_backend.startswith('extra_browser_'): | ||||
|             (t, key) = prefer_fetch_backend.split('extra_browser_') | ||||
|             connection = list( | ||||
|                 filter(lambda s: (s['browser_name'] == key), self.datastore.data['settings']['requests'].get('extra_browsers', []))) | ||||
|             if connection: | ||||
|                 prefer_fetch_backend = 'base_html_playwright' | ||||
|                 browser_connection_url = connection[0].get('browser_connection_url') | ||||
|                 prefer_fetch_backend = 'html_webdriver' | ||||
|                 custom_browser_connection_url = connection[0].get('browser_connection_url') | ||||
|  | ||||
|         # PDF should be html_requests because playwright will serve it up (so far) in a embedded page | ||||
|         # @todo https://github.com/dgtlmoon/changedetection.io/issues/2019 | ||||
|         # @todo needs test to or a fix | ||||
|         if self.watch.is_pdf: | ||||
|            prefer_fetch_backend = "html_requests" | ||||
|  | ||||
|         # Grab the right kind of 'fetcher', (playwright, requests, etc) | ||||
|         if hasattr(content_fetcher, prefer_fetch_backend): | ||||
|             fetcher_obj = getattr(content_fetcher, prefer_fetch_backend) | ||||
|         from changedetectionio import content_fetchers | ||||
|         if hasattr(content_fetchers, prefer_fetch_backend): | ||||
|             # @todo TEMPORARY HACK - SWITCH BACK TO PLAYWRIGHT FOR BROWSERSTEPS | ||||
|             if prefer_fetch_backend == 'html_webdriver' and self.watch.has_browser_steps: | ||||
|                 # This is never supported in selenium anyway | ||||
|                 logger.warning("Using playwright fetcher override for possible puppeteer request in browsersteps, because puppetteer:browser steps is incomplete.") | ||||
|                 from changedetectionio.content_fetchers.playwright import fetcher as playwright_fetcher | ||||
|                 fetcher_obj = playwright_fetcher | ||||
|             else: | ||||
|                 fetcher_obj = getattr(content_fetchers, prefer_fetch_backend) | ||||
|         else: | ||||
|             # If the klass doesnt exist, just use a default | ||||
|             fetcher_obj = getattr(content_fetcher, "html_requests") | ||||
|  | ||||
|             # What it referenced doesnt exist, Just use a default | ||||
|             fetcher_obj = getattr(content_fetchers, "html_requests") | ||||
|  | ||||
|         proxy_url = None | ||||
|         if preferred_proxy_id: | ||||
|             proxy_url = self.datastore.proxy_list.get(preferred_proxy_id).get('url') | ||||
|             print(f"Using proxy Key: {preferred_proxy_id} as Proxy URL {proxy_url}") | ||||
|             # Custom browser endpoints should NOT have a proxy added | ||||
|             if not prefer_fetch_backend.startswith('extra_browser_'): | ||||
|                 proxy_url = self.datastore.proxy_list.get(preferred_proxy_id).get('url') | ||||
|                 logger.debug(f"Selected proxy key '{preferred_proxy_id}' as proxy URL '{proxy_url}' for {url}") | ||||
|             else: | ||||
|                 logger.debug(f"Skipping adding proxy data when custom Browser endpoint is specified. ") | ||||
|  | ||||
|         # Now call the fetcher (playwright/requests/etc) with arguments that only a fetcher would need. | ||||
|         # When browser_connection_url is None, it method should default to working out whats the best defaults (os env vars etc) | ||||
|         self.fetcher = fetcher_obj(proxy_override=proxy_url, | ||||
|                                    browser_connection_url=browser_connection_url | ||||
|                                    custom_browser_connection_url=custom_browser_connection_url | ||||
|                                    ) | ||||
|  | ||||
|         if self.watch.has_browser_steps: | ||||
|   | ||||
| @@ -1,8 +1,9 @@ | ||||
|  | ||||
| import hashlib | ||||
| import urllib3 | ||||
| from . import difference_detection_processor | ||||
| from copy import deepcopy | ||||
| from loguru import logger | ||||
| import hashlib | ||||
| import urllib3 | ||||
|  | ||||
| urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) | ||||
|  | ||||
| @@ -43,11 +44,13 @@ class perform_site_check(difference_detection_processor): | ||||
|             fetched_md5 = hashlib.md5(self.fetcher.instock_data.encode('utf-8')).hexdigest() | ||||
|             # 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold. | ||||
|             update_obj["in_stock"] = True if self.fetcher.instock_data == 'Possibly in stock' else False | ||||
|             logger.debug(f"Watch UUID {uuid} restock check returned '{self.fetcher.instock_data}' from JS scraper.") | ||||
|         else: | ||||
|             raise UnableToExtractRestockData(status_code=self.fetcher.status_code) | ||||
|  | ||||
|         # The main thing that all this at the moment comes down to :) | ||||
|         changed_detected = False | ||||
|         logger.debug(f"Watch UUID {uuid} restock check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}") | ||||
|  | ||||
|         if watch.get('previous_md5') and watch.get('previous_md5') != fetched_md5: | ||||
|             # Yes if we only care about it going to instock, AND we are in stock | ||||
| @@ -60,5 +63,4 @@ class perform_site_check(difference_detection_processor): | ||||
|  | ||||
|         # Always record the new checksum | ||||
|         update_obj["previous_md5"] = fetched_md5 | ||||
|  | ||||
|         return changed_detected, update_obj, self.fetcher.instock_data.encode('utf-8') | ||||
|         return changed_detected, update_obj, self.fetcher.instock_data.encode('utf-8').strip() | ||||
|   | ||||
| @@ -2,16 +2,17 @@ | ||||
|  | ||||
| import hashlib | ||||
| import json | ||||
| import logging | ||||
| import os | ||||
| import re | ||||
| import urllib3 | ||||
|  | ||||
| from changedetectionio import content_fetcher, html_tools | ||||
| from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT | ||||
| from copy import deepcopy | ||||
| from . import difference_detection_processor | ||||
| from ..html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text | ||||
| from changedetectionio import html_tools, content_fetchers | ||||
| from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT | ||||
| import changedetectionio.content_fetchers | ||||
| from copy import deepcopy | ||||
| from loguru import logger | ||||
|  | ||||
| urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) | ||||
|  | ||||
| @@ -60,7 +61,7 @@ class perform_site_check(difference_detection_processor): | ||||
|         update_obj['previous_md5_before_filters'] = hashlib.md5(self.fetcher.content.encode('utf-8')).hexdigest() | ||||
|         if skip_when_checksum_same: | ||||
|             if update_obj['previous_md5_before_filters'] == watch.get('previous_md5_before_filters'): | ||||
|                 raise content_fetcher.checksumFromPreviousCheckWasTheSame() | ||||
|                 raise content_fetchers.exceptions.checksumFromPreviousCheckWasTheSame() | ||||
|  | ||||
|         # Fetching complete, now filters | ||||
|  | ||||
| @@ -116,7 +117,9 @@ class perform_site_check(difference_detection_processor): | ||||
|         # and then use getattr https://docs.python.org/3/reference/datamodel.html#object.__getitem__ | ||||
|         # https://realpython.com/inherit-python-dict/ instead of doing it procedurely | ||||
|         include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='include_filters') | ||||
|         include_filters_rule = [*watch.get('include_filters', []), *include_filters_from_tags] | ||||
|  | ||||
|         # 1845 - remove duplicated filters in both group and watch include filter | ||||
|         include_filters_rule = list(dict.fromkeys(watch.get('include_filters', []) + include_filters_from_tags)) | ||||
|  | ||||
|         subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='subtractive_selectors'), | ||||
|                                  *watch.get("subtractive_selectors", []), | ||||
| @@ -202,6 +205,12 @@ class perform_site_check(difference_detection_processor): | ||||
|                             is_rss=is_rss # #1874 activate the <title workaround hack | ||||
|                         ) | ||||
|  | ||||
|         if watch.get('sort_text_alphabetically') and stripped_text_from_html: | ||||
|             # Note: Because a <p>something</p> will add an extra line feed to signify the paragraph gap | ||||
|             # we end up with 'Some text\n\n', sorting will add all those extra \n at the start, so we remove them here. | ||||
|             stripped_text_from_html = stripped_text_from_html.replace('\n\n', '\n') | ||||
|             stripped_text_from_html = '\n'.join( sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower() )) | ||||
|  | ||||
|         # Re #340 - return the content before the 'ignore text' was applied | ||||
|         text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8') | ||||
|  | ||||
| @@ -235,7 +244,7 @@ class perform_site_check(difference_detection_processor): | ||||
|         # Treat pages with no renderable text content as a change? No by default | ||||
|         empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False) | ||||
|         if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0: | ||||
|             raise content_fetcher.ReplyWithContentButNoText(url=url, | ||||
|             raise content_fetchers.exceptions.ReplyWithContentButNoText(url=url, | ||||
|                                                             status_code=self.fetcher.get_last_status_code(), | ||||
|                                                             screenshot=screenshot, | ||||
|                                                             has_filters=has_filter_rule, | ||||
| @@ -335,15 +344,17 @@ class perform_site_check(difference_detection_processor): | ||||
|                 if not watch['title'] or not len(watch['title']): | ||||
|                     update_obj['title'] = html_tools.extract_element(find='title', html_content=self.fetcher.content) | ||||
|  | ||||
|         logger.debug(f"Watch UUID {uuid} content check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}") | ||||
|  | ||||
|         if changed_detected: | ||||
|             if watch.get('check_unique_lines', False): | ||||
|                 has_unique_lines = watch.lines_contain_something_unique_compared_to_history(lines=stripped_text_from_html.splitlines()) | ||||
|                 # One or more lines? unsure? | ||||
|                 if not has_unique_lines: | ||||
|                     logging.debug("check_unique_lines: UUID {} didnt have anything new setting change_detected=False".format(uuid)) | ||||
|                     logger.debug(f"check_unique_lines: UUID {uuid} didnt have anything new setting change_detected=False") | ||||
|                     changed_detected = False | ||||
|                 else: | ||||
|                     logging.debug("check_unique_lines: UUID {} had unique content".format(uuid)) | ||||
|                     logger.debug(f"check_unique_lines: UUID {uuid} had unique content") | ||||
|  | ||||
|         # Always record the new checksum | ||||
|         update_obj["previous_md5"] = fetched_md5 | ||||
|   | ||||
| @@ -1,108 +0,0 @@ | ||||
| function isItemInStock() { | ||||
|   // @todo Pass these in so the same list can be used in non-JS fetchers | ||||
|   const outOfStockTexts = [ | ||||
|     '0 in stock', | ||||
|     'agotado', | ||||
|     'artikel zurzeit vergriffen', | ||||
|     'as soon as stock is available', | ||||
|     'ausverkauft', // sold out | ||||
|     'available for back order', | ||||
|     'back-order or out of stock', | ||||
|     'backordered', | ||||
|     'benachrichtigt mich', // notify me | ||||
|     'brak na stanie', | ||||
|     'brak w magazynie', | ||||
|     'coming soon', | ||||
|     'currently have any tickets for this', | ||||
|     'currently unavailable', | ||||
|     'dostępne wkrótce', | ||||
|     'en rupture de stock', | ||||
|     'ist derzeit nicht auf lager', | ||||
|     'item is no longer available', | ||||
|     'message if back in stock', | ||||
|     'nachricht bei', | ||||
|     'nicht auf lager', | ||||
|     'nicht lieferbar', | ||||
|     'nicht zur verfügung', | ||||
|     'no disponible temporalmente', | ||||
|     'no longer in stock', | ||||
|     'no tickets available', | ||||
|     'not available', | ||||
|     'not currently available', | ||||
|     'not in stock', | ||||
|     'notify me when available', | ||||
|     'não estamos a aceitar encomendas', | ||||
|     'out of stock', | ||||
|     'out-of-stock', | ||||
|     'produkt niedostępny', | ||||
|     'sold out', | ||||
|     'temporarily out of stock', | ||||
|     'temporarily unavailable', | ||||
|     'tickets unavailable', | ||||
|     'unavailable tickets', | ||||
|     'we do not currently have an estimate of when this product will be back in stock.', | ||||
|     'zur zeit nicht an lager', | ||||
|     '已售完', | ||||
|   ]; | ||||
|  | ||||
|  | ||||
|   const negateOutOfStockRegexs = [ | ||||
|       '[0-9] in stock' | ||||
|   ] | ||||
|   var negateOutOfStockRegexs_r = []; | ||||
|   for (let i = 0; i < negateOutOfStockRegexs.length; i++) { | ||||
|     negateOutOfStockRegexs_r.push(new RegExp(negateOutOfStockRegexs[0], 'g')); | ||||
|   } | ||||
|  | ||||
|  | ||||
|   const elementsWithZeroChildren = Array.from(document.getElementsByTagName('*')).filter(element => element.children.length === 0); | ||||
|  | ||||
|   // REGEXS THAT REALLY MEAN IT'S IN STOCK | ||||
|   for (let i = elementsWithZeroChildren.length - 1; i >= 0; i--) { | ||||
|     const element = elementsWithZeroChildren[i]; | ||||
|     if (element.offsetWidth > 0 || element.offsetHeight > 0 || element.getClientRects().length > 0) { | ||||
|       var elementText=""; | ||||
|       if (element.tagName.toLowerCase() === "input") { | ||||
|         elementText = element.value.toLowerCase(); | ||||
|       } else { | ||||
|         elementText = element.textContent.toLowerCase(); | ||||
|       } | ||||
|  | ||||
|       if (elementText.length) { | ||||
|         // try which ones could mean its in stock | ||||
|         for (let i = 0; i < negateOutOfStockRegexs.length; i++) { | ||||
|           if (negateOutOfStockRegexs_r[i].test(elementText)) { | ||||
|             return 'Possibly in stock'; | ||||
|           } | ||||
|         } | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   // OTHER STUFF THAT COULD BE THAT IT'S OUT OF STOCK | ||||
|   for (let i = elementsWithZeroChildren.length - 1; i >= 0; i--) { | ||||
|     const element = elementsWithZeroChildren[i]; | ||||
|     if (element.offsetWidth > 0 || element.offsetHeight > 0 || element.getClientRects().length > 0) { | ||||
|       var elementText=""; | ||||
|       if (element.tagName.toLowerCase() === "input") { | ||||
|         elementText = element.value.toLowerCase(); | ||||
|       } else { | ||||
|         elementText = element.textContent.toLowerCase(); | ||||
|       } | ||||
|  | ||||
|       if (elementText.length) { | ||||
|         // and these mean its out of stock | ||||
|         for (const outOfStockText of outOfStockTexts) { | ||||
|           if (elementText.includes(outOfStockText)) { | ||||
|             return elementText; // item is out of stock | ||||
|           } | ||||
|         } | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   return 'Possibly in stock'; // possibly in stock, cant decide otherwise. | ||||
| } | ||||
|  | ||||
| // returns the element text that makes it think it's out of stock | ||||
| return isItemInStock(); | ||||
| @@ -2,20 +2,22 @@ | ||||
|  | ||||
| # run some tests and look if the 'custom-browser-search-string=1' connect string appeared in the correct containers | ||||
|  | ||||
| # @todo do it again but with the puppeteer one | ||||
|  | ||||
| # enable debug | ||||
| set -x | ||||
|  | ||||
| # A extra browser is configured, but we never chose to use it, so it should NOT show in the logs | ||||
| docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio;pytest tests/custom_browser_url/test_custom_browser_url.py::test_request_not_via_custom_browser_url' | ||||
| docker logs browserless-custom-url &>log.txt | ||||
| grep 'custom-browser-search-string=1' log.txt | ||||
| docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio;pytest tests/custom_browser_url/test_custom_browser_url.py::test_request_not_via_custom_browser_url' | ||||
| docker logs sockpuppetbrowser-custom-url &>log-custom.txt | ||||
| grep 'custom-browser-search-string=1' log-custom.txt | ||||
| if [ $? -ne 1 ] | ||||
| then | ||||
|   echo "Saw a request in 'browserless-custom-url' container with 'custom-browser-search-string=1' when I should not" | ||||
|   echo "Saw a request in 'sockpuppetbrowser-custom-url' container with 'custom-browser-search-string=1' when I should not - log-custom.txt" | ||||
|   exit 1 | ||||
| fi | ||||
|  | ||||
| docker logs browserless &>log.txt | ||||
| docker logs sockpuppetbrowser &>log.txt | ||||
| grep 'custom-browser-search-string=1' log.txt | ||||
| if [ $? -ne 1 ] | ||||
| then | ||||
| @@ -24,16 +26,16 @@ then | ||||
| fi | ||||
|  | ||||
| # Special connect string should appear in the custom-url container, but not in the 'default' one | ||||
| docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio;pytest tests/custom_browser_url/test_custom_browser_url.py::test_request_via_custom_browser_url' | ||||
| docker logs browserless-custom-url &>log.txt | ||||
| grep 'custom-browser-search-string=1' log.txt | ||||
| docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio;pytest tests/custom_browser_url/test_custom_browser_url.py::test_request_via_custom_browser_url' | ||||
| docker logs sockpuppetbrowser-custom-url &>log-custom.txt | ||||
| grep 'custom-browser-search-string=1' log-custom.txt | ||||
| if [ $? -ne 0 ] | ||||
| then | ||||
|   echo "Did not see request in 'browserless-custom-url' container with 'custom-browser-search-string=1' when I should" | ||||
|   echo "Did not see request in 'sockpuppetbrowser-custom-url' container with 'custom-browser-search-string=1' when I should - log-custom.txt" | ||||
|   exit 1 | ||||
| fi | ||||
|  | ||||
| docker logs browserless &>log.txt | ||||
| docker logs sockpuppetbrowser &>log.txt | ||||
| grep 'custom-browser-search-string=1' log.txt | ||||
| if [ $? -ne 1 ] | ||||
| then | ||||
|   | ||||
| @@ -10,41 +10,7 @@ set -x | ||||
| docker run --network changedet-network -d --name squid-one --hostname squid-one --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf ubuntu/squid:4.13-21.10_edge | ||||
| docker run --network changedet-network -d --name squid-two --hostname squid-two --rm -v `pwd`/tests/proxy_list/squid.conf:/etc/squid/conf.d/debian.conf ubuntu/squid:4.13-21.10_edge | ||||
|  | ||||
| # SOCKS5 related - start simple Socks5 proxy server | ||||
| # SOCKSTEST=xyz should show in the logs of this service to confirm it fetched | ||||
| docker run --network changedet-network -d --hostname socks5proxy --name socks5proxy -p 1080:1080 -e PROXY_USER=proxy_user123 -e PROXY_PASSWORD=proxy_pass123 serjs/go-socks5-proxy | ||||
| docker run --network changedet-network -d --hostname socks5proxy-noauth -p 1081:1080 --name socks5proxy-noauth  serjs/go-socks5-proxy | ||||
|  | ||||
| echo "---------------------------------- SOCKS5 -------------------" | ||||
| # SOCKS5 related - test from proxies.json | ||||
| docker run --network changedet-network \ | ||||
|   -v `pwd`/tests/proxy_socks5/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json \ | ||||
|   --rm \ | ||||
|   -e "SOCKSTEST=proxiesjson" \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py' | ||||
|  | ||||
| # SOCKS5 related - by manually entering in UI | ||||
| docker run --network changedet-network \ | ||||
|   --rm \ | ||||
|   -e "SOCKSTEST=manual" \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy.py' | ||||
|  | ||||
| # SOCKS5 related - test from proxies.json via playwright - NOTE- PLAYWRIGHT DOESNT SUPPORT AUTHENTICATING PROXY | ||||
| docker run --network changedet-network \ | ||||
|   -e "SOCKSTEST=manual-playwright" \ | ||||
|   -v `pwd`/tests/proxy_socks5/proxies.json-example-noauth:/app/changedetectionio/test-datastore/proxies.json \ | ||||
|   -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" \ | ||||
|   --rm \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py' | ||||
|  | ||||
| echo "socks5 server logs" | ||||
| docker logs socks5proxy | ||||
| echo "----------------------------------" | ||||
|  | ||||
| # Used for configuring a custom proxy URL via the UI | ||||
| # Used for configuring a custom proxy URL via the UI - with username+password auth | ||||
| docker run --network changedet-network -d \ | ||||
|   --name squid-custom \ | ||||
|   --hostname squid-custom \ | ||||
| @@ -60,15 +26,17 @@ docker run --network changedet-network \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_list/test_multiple_proxy.py' | ||||
|  | ||||
|  | ||||
| ## Should be a request in the default "first" squid | ||||
| set +e | ||||
| echo "- Looking for chosen.changedetection.io request in squid-one - it should NOT be here" | ||||
| docker logs squid-one 2>/dev/null|grep chosen.changedetection.io | ||||
| if [ $? -ne 0 ] | ||||
| if [ $? -ne 1 ] | ||||
| then | ||||
|   echo "Did not see a request to chosen.changedetection.io in the squid logs (while checking preferred proxy - squid one)" | ||||
|   echo "Saw a request to chosen.changedetection.io in the squid logs (while checking preferred proxy - squid one) WHEN I SHOULD NOT" | ||||
|   exit 1 | ||||
| fi | ||||
|  | ||||
| set -e | ||||
| echo "- Looking for chosen.changedetection.io request in squid-two" | ||||
| # And one in the 'second' squid (user selects this as preferred) | ||||
| docker logs squid-two 2>/dev/null|grep chosen.changedetection.io | ||||
| if [ $? -ne 0 ] | ||||
| @@ -77,7 +45,6 @@ then | ||||
|   exit 1 | ||||
| fi | ||||
|  | ||||
|  | ||||
| # Test the UI configurable proxies | ||||
| docker run --network changedet-network \ | ||||
|   test-changedetectionio \ | ||||
| @@ -85,6 +52,7 @@ docker run --network changedet-network \ | ||||
|  | ||||
|  | ||||
| # Should see a request for one.changedetection.io in there | ||||
| echo "- Looking for .changedetection.io request in squid-custom" | ||||
| docker logs squid-custom 2>/dev/null|grep "TCP_TUNNEL.200.*changedetection.io" | ||||
| if [ $? -ne 0 ] | ||||
| then | ||||
| @@ -101,7 +69,7 @@ docker run --network changedet-network \ | ||||
| set +e | ||||
| # Check request was never seen in any container | ||||
| for c in $(echo "squid-one squid-two squid-custom"); do | ||||
|   echo Checking $c | ||||
|   echo ....Checking $c | ||||
|   docker logs $c &> $c.txt | ||||
|   grep noproxy $c.txt | ||||
|   if [ $? -ne 1 ] | ||||
|   | ||||
							
								
								
									
										43
									
								
								changedetectionio/run_socks_proxy_tests.sh
									
									
									
									
									
										Executable file
									
								
							
							
						
						| @@ -0,0 +1,43 @@ | ||||
| #!/bin/bash | ||||
|  | ||||
| # exit when any command fails | ||||
| set -e | ||||
| # enable debug | ||||
| set -x | ||||
|  | ||||
|  | ||||
| # SOCKS5 related - start simple Socks5 proxy server | ||||
| # SOCKSTEST=xyz should show in the logs of this service to confirm it fetched | ||||
| docker run --network changedet-network -d --hostname socks5proxy --rm  --name socks5proxy -p 1080:1080 -e PROXY_USER=proxy_user123 -e PROXY_PASSWORD=proxy_pass123 serjs/go-socks5-proxy | ||||
| docker run --network changedet-network -d --hostname socks5proxy-noauth --rm  -p 1081:1080 --name socks5proxy-noauth  serjs/go-socks5-proxy | ||||
|  | ||||
| echo "---------------------------------- SOCKS5 -------------------" | ||||
| # SOCKS5 related - test from proxies.json | ||||
| docker run --network changedet-network \ | ||||
|   -v `pwd`/tests/proxy_socks5/proxies.json-example:/app/changedetectionio/test-datastore/proxies.json \ | ||||
|   --rm \ | ||||
|   -e "SOCKSTEST=proxiesjson" \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py' | ||||
|  | ||||
| # SOCKS5 related - by manually entering in UI | ||||
| docker run --network changedet-network \ | ||||
|   --rm \ | ||||
|   -e "SOCKSTEST=manual" \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy.py' | ||||
|  | ||||
| # SOCKS5 related - test from proxies.json via playwright - NOTE- PLAYWRIGHT DOESNT SUPPORT AUTHENTICATING PROXY | ||||
| docker run --network changedet-network \ | ||||
|   -e "SOCKSTEST=manual-playwright" \ | ||||
|   -v `pwd`/tests/proxy_socks5/proxies.json-example-noauth:/app/changedetectionio/test-datastore/proxies.json \ | ||||
|   -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" \ | ||||
|   --rm \ | ||||
|   test-changedetectionio \ | ||||
|   bash -c 'cd changedetectionio && pytest tests/proxy_socks5/test_socks5_proxy_sources.py' | ||||
|  | ||||
| echo "socks5 server logs" | ||||
| docker logs socks5proxy | ||||
| echo "----------------------------------" | ||||
|  | ||||
| docker kill socks5proxy socks5proxy-noauth | ||||
| @@ -1,4 +1,4 @@ | ||||
| <?xml version="1.0" encoding="UTF-8" standalone="no"?> | ||||
| <svg width="15" height="16.363636" viewBox="0 0 15 16.363636" xmlns="http://www.w3.org/2000/svg" xmlns:svg="http://www.w3.org/2000/svg"> | ||||
| <svg width="15" height="16.363636" viewBox="0 0 15 16.363636" xmlns="http://www.w3.org/2000/svg" > | ||||
|   <path d="m 14.318182,11.762045 v 1.1925 H 5.4102273 L 11.849318,7.1140909 C 12.234545,9.1561364 12.54,11.181818 14.318182,11.762045 Z m -6.7984093,4.601591 c 1.0759091,0 2.0256823,-0.955909 2.0256823,-2.045454 H 5.4545455 c 0,1.089545 0.9879545,2.045454 2.0652272,2.045454 z M 15,2.8622727 0.9177273,15.636136 0,14.627045 l 1.8443182,-1.6725 h -1.1625 v -1.1925 C 4.0070455,10.677273 2.1784091,4.5388636 5.3611364,2.6897727 5.8009091,2.4347727 6.0709091,1.9609091 6.0702273,1.4488636 v -0.00205 C 6.0702273,0.64772727 6.7104545,0 7.5,0 8.2895455,0 8.9297727,0.64772727 8.9297727,1.4468182 v 0.00205 C 8.9290909,1.9602319 9.199773,2.4354591 9.638864,2.6897773 10.364318,3.111141 10.827273,3.7568228 11.1525,4.5129591 L 14.085682,1.8531818 Z M 6.8181818,1.3636364 C 6.8181818,1.74 7.1236364,2.0454545 7.5,2.0454545 7.8763636,2.0454545 8.1818182,1.74 8.1818182,1.3636364 8.1818182,0.98795455 7.8763636,0.68181818 7.5,0.68181818 c -0.3763636,0 -0.6818182,0.30613637 -0.6818182,0.68181822 z" id="path2" style="fill:#f8321b;stroke-width:0.681818;fill-opacity:1"/> | ||||
| </svg> | ||||
|   | ||||
| Before Width: | Height: | Size: 1.2 KiB After Width: | Height: | Size: 1.2 KiB | 
| @@ -10,7 +10,7 @@ | ||||
|    xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | ||||
|    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | ||||
|    xmlns="http://www.w3.org/2000/svg" | ||||
|    xmlns:svg="http://www.w3.org/2000/svg"> | ||||
|    > | ||||
|   <defs | ||||
|      id="defs16" /> | ||||
|   <sodipodi:namedview | ||||
|   | ||||
| Before Width: | Height: | Size: 11 KiB After Width: | Height: | Size: 11 KiB | 
| @@ -12,7 +12,7 @@ | ||||
|    xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | ||||
|    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | ||||
|    xmlns="http://www.w3.org/2000/svg" | ||||
|    xmlns:svg="http://www.w3.org/2000/svg"><defs | ||||
|    ><defs | ||||
|      id="defs11" /><sodipodi:namedview | ||||
|      id="namedview9" | ||||
|      pagecolor="#ffffff" | ||||
|   | ||||
| Before Width: | Height: | Size: 2.5 KiB After Width: | Height: | Size: 2.5 KiB | 
| @@ -10,7 +10,7 @@ | ||||
|    viewBox="0 0 7.1975545 4.7993639" | ||||
|    xml:space="preserve" | ||||
|    xmlns="http://www.w3.org/2000/svg" | ||||
|    xmlns:svg="http://www.w3.org/2000/svg"><defs | ||||
|    ><defs | ||||
|    id="defs19" /> | ||||
| <g | ||||
|    id="g14" | ||||
|   | ||||
| Before Width: | Height: | Size: 1.9 KiB After Width: | Height: | Size: 1.9 KiB | 
| @@ -9,7 +9,7 @@ | ||||
|    id="svg5" | ||||
|    xmlns:xlink="http://www.w3.org/1999/xlink" | ||||
|    xmlns="http://www.w3.org/2000/svg" | ||||
|    xmlns:svg="http://www.w3.org/2000/svg"> | ||||
|    > | ||||
|   <defs | ||||
|      id="defs2" /> | ||||
|   <g | ||||
|   | ||||
| Before Width: | Height: | Size: 2.4 KiB After Width: | Height: | Size: 2.4 KiB | 
| @@ -10,7 +10,7 @@ | ||||
|    xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | ||||
|    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | ||||
|    xmlns="http://www.w3.org/2000/svg" | ||||
|    xmlns:svg="http://www.w3.org/2000/svg"> | ||||
|    > | ||||
|   <defs | ||||
|      id="defs12" /> | ||||
|   <sodipodi:namedview | ||||
|   | ||||
| Before Width: | Height: | Size: 9.7 KiB After Width: | Height: | Size: 9.7 KiB | 
| @@ -3,7 +3,6 @@ | ||||
|    xmlns:dc="http://purl.org/dc/elements/1.1/" | ||||
|    xmlns:cc="http://creativecommons.org/ns#" | ||||
|    xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" | ||||
|    xmlns:svg="http://www.w3.org/2000/svg" | ||||
|    xmlns="http://www.w3.org/2000/svg" | ||||
|    version="1.1" | ||||
|    id="Capa_1" | ||||
|   | ||||
| Before Width: | Height: | Size: 2.9 KiB After Width: | Height: | Size: 2.8 KiB | 
| @@ -13,7 +13,6 @@ | ||||
|    xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | ||||
|    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | ||||
|    xmlns="http://www.w3.org/2000/svg" | ||||
|    xmlns:svg="http://www.w3.org/2000/svg" | ||||
|    xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" | ||||
|    xmlns:cc="http://creativecommons.org/ns#" | ||||
|    xmlns:dc="http://purl.org/dc/elements/1.1/"><sodipodi:namedview | ||||
|   | ||||
| Before Width: | Height: | Size: 3.5 KiB After Width: | Height: | Size: 3.4 KiB | 
| @@ -6,7 +6,7 @@ | ||||
|    version="1.1" | ||||
|    id="svg6" | ||||
|    xmlns="http://www.w3.org/2000/svg" | ||||
|    xmlns:svg="http://www.w3.org/2000/svg"> | ||||
|    > | ||||
|   <defs | ||||
|      id="defs10" /> | ||||
|   <path | ||||
|   | ||||
| Before Width: | Height: | Size: 892 B After Width: | Height: | Size: 854 B | 
| @@ -1,5 +1,5 @@ | ||||
| <?xml version="1.0" encoding="UTF-8" standalone="no"?> | ||||
| <svg width="18" height="19.92" viewBox="0 0 18 19.92" xmlns="http://www.w3.org/2000/svg" xmlns:svg="http://www.w3.org/2000/svg"> | ||||
| <svg width="18" height="19.92" viewBox="0 0 18 19.92" xmlns="http://www.w3.org/2000/svg" > | ||||
|   <path d="M -3,-2 H 21 V 22 H -3 Z" fill="none" id="path2"/> | ||||
|   <path d="m 15,14.08 c -0.76,0 -1.44,0.3 -1.96,0.77 L 5.91,10.7 C 5.96,10.47 6,10.24 6,10 6,9.76 5.96,9.53 5.91,9.3 L 12.96,5.19 C 13.5,5.69 14.21,6 15,6 16.66,6 18,4.66 18,3 18,1.34 16.66,0 15,0 c -1.66,0 -3,1.34 -3,3 0,0.24 0.04,0.47 0.09,0.7 L 5.04,7.81 C 4.5,7.31 3.79,7 3,7 1.34,7 0,8.34 0,10 c 0,1.66 1.34,3 3,3 0.79,0 1.5,-0.31 2.04,-0.81 l 7.12,4.16 c -0.05,0.21 -0.08,0.43 -0.08,0.65 0,1.61 1.31,2.92 2.92,2.92 1.61,0 2.92,-1.31 2.92,-2.92 0,-1.61 -1.31,-2.92 -2.92,-2.92 z" id="path4" style="fill:#0078e7;fill-opacity:1"/> | ||||
| </svg> | ||||
|   | ||||
| Before Width: | Height: | Size: 787 B After Width: | Height: | Size: 749 B | 
							
								
								
									
										44
									
								
								changedetectionio/static/images/steps.svg
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,44 @@ | ||||
| <?xml version="1.0" encoding="UTF-8" standalone="no"?> | ||||
| <svg | ||||
|    aria-hidden="true" | ||||
|    viewBox="0 0 19.966091 17.999964" | ||||
|    class="css-1oqmxjn" | ||||
|    version="1.1" | ||||
|    id="svg4" | ||||
|    sodipodi:docname="steps.svg" | ||||
|    width="19.966091" | ||||
|    height="17.999964" | ||||
|    inkscape:version="1.1.2 (0a00cf5339, 2022-02-04)" | ||||
|    xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | ||||
|    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | ||||
|    xmlns="http://www.w3.org/2000/svg" | ||||
|    xmlns:svg="http://www.w3.org/2000/svg"> | ||||
|   <defs | ||||
|      id="defs8" /> | ||||
|   <sodipodi:namedview | ||||
|      id="namedview6" | ||||
|      pagecolor="#ffffff" | ||||
|      bordercolor="#666666" | ||||
|      borderopacity="1.0" | ||||
|      inkscape:pageshadow="2" | ||||
|      inkscape:pageopacity="0.0" | ||||
|      inkscape:pagecheckerboard="0" | ||||
|      showgrid="false" | ||||
|      fit-margin-top="0" | ||||
|      fit-margin-left="0" | ||||
|      fit-margin-right="0" | ||||
|      fit-margin-bottom="0" | ||||
|      inkscape:zoom="8.6354167" | ||||
|      inkscape:cx="-1.3896261" | ||||
|      inkscape:cy="6.1375151" | ||||
|      inkscape:window-width="1280" | ||||
|      inkscape:window-height="667" | ||||
|      inkscape:window-x="2419" | ||||
|      inkscape:window-y="250" | ||||
|      inkscape:window-maximized="0" | ||||
|      inkscape:current-layer="svg4" /> | ||||
|   <path | ||||
|      d="m 16.95807,12.000003 c -0.7076,0.0019 -1.3917,0.2538 -1.9316,0.7113 -0.5398,0.4575 -0.9005,1.091 -1.0184,1.7887 H 5.60804 c -0.80847,0.0297 -1.60693,-0.1865 -2.29,-0.62 -0.26632,-0.1847 -0.48375,-0.4315 -0.63356,-0.7189 -0.14982,-0.2874 -0.22753,-0.607 -0.22644,-0.9311 -0.02843,-0.3931 0.03646,-0.7873 0.1894,-1.1505 0.15293,-0.3632 0.38957,-0.6851 0.6906,-0.9395 0.66628,-0.4559004 1.4637,-0.6807004 2.27,-0.6400004 h 8.35003 c 0.8515,-0.0223 1.6727,-0.3206 2.34,-0.85 0.3971,-0.3622 0.7076,-0.8091 0.9084,-1.3077 0.2008,-0.49857 0.2868,-1.03596 0.2516,-1.57229 0.0113,-0.47161 -0.0887,-0.93924 -0.292,-1.36493 -0.2033,-0.4257 -0.5041,-0.79745 -0.878,-1.08507 -0.7801,-0.55815 -1.7212,-0.84609 -2.68,-0.82 H 5.95804 c -0.12537,-0.7417 -0.5248,-1.40924 -1.11913,-1.87032996 -0.59434,-0.46108 -1.3402,-0.68207 -2.08979,-0.61917 -0.74958,0.06291 -1.44818,0.40512 -1.95736,0.95881 C 0.28259,1.5230126 0,2.2477926 0,3.0000126 c 0,0.75222 0.28259,1.47699 0.79176,2.03068 0.50918,0.55369 1.20778,0.8959 1.95736,0.95881 0.74959,0.0629 1.49545,-0.15808 2.08979,-0.61917 0.59433,-0.46109 0.99376,-1.12863 1.11913,-1.87032 h 7.70003 c 0.7353,-0.03061 1.4599,0.18397 2.06,0.61 0.2548,0.19335 0.4595,0.445 0.597,0.73385 0.1375,0.28884 0.2036,0.60644 0.193,0.92615 0.0316,0.38842 -0.0247,0.77898 -0.165,1.14258 -0.1402,0.36361 -0.3607,0.69091 -0.645,0.95741 -0.5713,0.4398 -1.2799,0.663 -2,0.63 H 5.69804 c -1.03259,-0.0462 -2.05065,0.2568 -2.89,0.86 -0.43755,0.3361 -0.78838,0.7720004 -1.02322,1.2712004 -0.23484,0.4993 -0.34688,1.0474 -0.32678,1.5988 -0.00726,0.484 0.10591,0.9622 0.32934,1.3916 0.22344,0.4295 0.55012,0.7966 0.95066,1.0684 0.85039,0.5592 1.85274,0.8421 2.87,0.81 h 8.40003 c 0.0954,0.5643 0.3502,1.0896 0.7343,1.5138 0.3842,0.4242 0.8817,0.7297 1.4338,0.8803 0.5521,0.1507 1.1358,0.1403 1.6822,-0.0299 0.5464,-0.1702 1.0328,-0.4932 1.4016,-0.9308 0.3688,-0.4376 0.6048,-0.9716 0.6801,-1.5389 0.0752,-0.5673 -0.0134,-1.1444 -0.2554,-1.663 -0.242,-0.5186 -0.6273,-0.9572 
-1.1104,-1.264 -0.4831,-0.3068 -1.0439,-0.469 -1.6162,-0.4675 z m 0,5 c -0.3956,0 -0.7823,-0.1173 -1.1112,-0.3371 -0.3289,-0.2197 -0.5852,-0.5321 -0.7366,-0.8975 -0.1514,-0.3655 -0.191,-0.7676 -0.1138,-1.1556 0.0772,-0.3879 0.2677,-0.7443 0.5474,-1.024 0.2797,-0.2797 0.636,-0.4702 1.024,-0.5474 0.388,-0.0771 0.7901,-0.0375 1.1555,0.1138 0.3655,0.1514 0.6778,0.4078 0.8976,0.7367 0.2198,0.3289 0.3371,0.7155 0.3371,1.1111 0,0.5304 -0.2107,1.0391 -0.5858,1.4142 -0.3751,0.3751 -0.8838,0.5858 -1.4142,0.5858 z" | ||||
|      id="path2" | ||||
|      style="fill:#777777;fill-opacity:1" /> | ||||
| </svg> | ||||
| After Width: | Height: | Size: 3.7 KiB | 
| @@ -10,7 +10,7 @@ $(document).ready(function () { | ||||
|         } | ||||
|     }) | ||||
|     var browsersteps_session_id; | ||||
|     var browserless_seconds_remaining = 0; | ||||
|     var browser_interface_seconds_remaining = 0; | ||||
|     var apply_buttons_disabled = false; | ||||
|     var include_text_elements = $("#include_text_elements"); | ||||
|     var xpath_data = false; | ||||
| @@ -49,7 +49,7 @@ $(document).ready(function () { | ||||
|         $('#browsersteps-img').removeAttr('src'); | ||||
|         $("#browsersteps-click-start").show(); | ||||
|         $("#browsersteps-selector-wrapper .spinner").hide(); | ||||
|         browserless_seconds_remaining = 0; | ||||
|         browser_interface_seconds_remaining = 0; | ||||
|         browsersteps_session_id = false; | ||||
|         apply_buttons_disabled = false; | ||||
|         ctx.clearRect(0, 0, c.width, c.height); | ||||
| @@ -61,12 +61,12 @@ $(document).ready(function () { | ||||
|         $('#browser_steps >li:first-child').css('opacity', '0.5'); | ||||
|     } | ||||
|  | ||||
|     // Show seconds remaining until playwright/browserless needs to restart the session | ||||
|     // Show seconds remaining until the browser interface needs to restart the session | ||||
|     // (See comment at the top of changedetectionio/blueprint/browser_steps/__init__.py ) | ||||
|     setInterval(() => { | ||||
|         if (browserless_seconds_remaining >= 1) { | ||||
|             document.getElementById('browserless-seconds-remaining').innerText = browserless_seconds_remaining + " seconds remaining in session"; | ||||
|             browserless_seconds_remaining -= 1; | ||||
|         if (browser_interface_seconds_remaining >= 1) { | ||||
|             document.getElementById('browser-seconds-remaining').innerText = browser_interface_seconds_remaining + " seconds remaining in session"; | ||||
|             browser_interface_seconds_remaining -= 1; | ||||
|         } | ||||
|     }, "1000") | ||||
|  | ||||
| @@ -160,6 +160,12 @@ $(document).ready(function () { | ||||
|                     e.offsetX > item.left * y_scale && e.offsetX < item.left * y_scale + item.width * y_scale | ||||
|  | ||||
|                 ) { | ||||
|                     // Ignore really large ones, because we are scraping 'div' also from xpath_element_scraper but | ||||
|                     // that div or whatever could be some wrapper and would generally make you select the whole page | ||||
|                     if (item.width > 800 && item.height > 400) { | ||||
|                         return | ||||
|                     } | ||||
|  | ||||
|                     // There could be many elements here, record them all and then we'll find out which is the most 'useful' | ||||
|                     // (input, textarea, button, A etc) | ||||
|                     if (item.width < xpath_data['browser_width']) { | ||||
| @@ -261,7 +267,7 @@ $(document).ready(function () { | ||||
|             // This should trigger 'Goto site' | ||||
|             console.log("Got startup response, requesting Goto-Site (first) step fake click"); | ||||
|             $('#browser_steps >li:first-child .apply').click(); | ||||
|             browserless_seconds_remaining = 500; | ||||
|             browser_interface_seconds_remaining = 500; | ||||
|             set_first_gotosite_disabled(); | ||||
|         }).fail(function (data) { | ||||
|             console.log(data); | ||||
|   | ||||
| @@ -90,5 +90,10 @@ $(document).ready(function () { | ||||
|         } | ||||
|     } | ||||
|  | ||||
|  | ||||
|     $('#diff-form').on('submit', function (e) { | ||||
|         if ($('select[name=from_version]').val() === $('select[name=to_version]').val()) { | ||||
|             e.preventDefault(); | ||||
|             alert('Error - You are trying to compare the same version.'); | ||||
|         } | ||||
|     }); | ||||
| }); | ||||
|   | ||||
| @@ -1,19 +1,4 @@ | ||||
| $(document).ready(function () { | ||||
|     function toggle() { | ||||
|         if ($('input[name="application-fetch_backend"]:checked').val() != 'html_requests') { | ||||
|             $('#requests-override-options').hide(); | ||||
|             $('#webdriver-override-options').show(); | ||||
|         } else { | ||||
|             $('#requests-override-options').show(); | ||||
|             $('#webdriver-override-options').hide(); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     $('input[name="application-fetch_backend"]').click(function (e) { | ||||
|         toggle(); | ||||
|     }); | ||||
|     toggle(); | ||||
|  | ||||
|     $("#api-key").hover( | ||||
|         function () { | ||||
|             $("#api-key-copy").html('copy').fadeIn(); | ||||
|   | ||||
| @@ -24,14 +24,17 @@ $(document).ready(function() { | ||||
|     }) | ||||
|  | ||||
|     data = { | ||||
|         window_url : window.location.href, | ||||
|         notification_urls : $('.notification-urls').val(), | ||||
|       notification_body: $('#notification_body').val(), | ||||
|       notification_format: $('#notification_format').val(), | ||||
|       notification_title: $('#notification_title').val(), | ||||
|       notification_urls: $('.notification-urls').val(), | ||||
|       window_url: window.location.href, | ||||
|     } | ||||
|     for (key in data) { | ||||
|       if (!data[key].length) { | ||||
|         alert(key+" is empty, cannot send test.") | ||||
|         return; | ||||
|       } | ||||
|  | ||||
|  | ||||
|     if (!data['notification_urls'].length) { | ||||
|       alert("Notification URL list is empty, cannot send test.") | ||||
|       return; | ||||
|     } | ||||
|  | ||||
|     $.ajax({ | ||||
|   | ||||
							
								
								
									
										29
									
								
								changedetectionio/static/js/vis.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -0,0 +1,29 @@ | ||||
| $(document).ready(function () { | ||||
|  | ||||
|     // Lazy Hide/Show elements mechanism | ||||
|     $('[data-visible-for]').hide(); | ||||
|     function show_related_elem(e) { | ||||
|         var n = $(e).attr('name') + "=" + $(e).val(); | ||||
|         if (n === 'fetch_backend=system') { | ||||
|             n = "fetch_backend=" + default_system_fetch_backend; | ||||
|         } | ||||
|         $(`[data-visible-for~="${n}"]`).show(); | ||||
|     } | ||||
|     $(':radio').on('keyup keypress blur change click', function (e) { | ||||
|         $(`[data-visible-for]`).hide(); | ||||
|         $('.advanced-options').hide(); | ||||
|         show_related_elem(this); | ||||
|     }); | ||||
|  | ||||
|     $(':radio:checked').each(function (e) { | ||||
|        show_related_elem(this); | ||||
|     }) | ||||
|  | ||||
|  | ||||
|     // Show advanced | ||||
|     $('.show-advanced').click(function (e) { | ||||
|         $(this).closest('.tab-pane-inner').find('.advanced-options').each(function (e) { | ||||
|             $(this).toggle(); | ||||
|         }) | ||||
|     }); | ||||
| }); | ||||
| @@ -149,7 +149,7 @@ $(document).ready(function () { | ||||
|             // @todo In the future paint all that match | ||||
|             for (const c of current_default_xpath) { | ||||
|                 for (var i = selector_data['size_pos'].length; i !== 0; i--) { | ||||
|                     if (selector_data['size_pos'][i - 1].xpath === c) { | ||||
|                     if (selector_data['size_pos'][i - 1].xpath.trim() === c.trim()) { | ||||
|                         console.log("highlighting " + c); | ||||
|                         current_selected_i = i - 1; | ||||
|                         highlight_current_selected_i(); | ||||
|   | ||||
| @@ -1,40 +1,4 @@ | ||||
| $(document).ready(function () { | ||||
|     function toggle() { | ||||
|         if ($('input[name="fetch_backend"]:checked').val() == 'html_webdriver') { | ||||
|             if (playwright_enabled) { | ||||
|                 // playwright supports headers, so hide everything else | ||||
|                 // See #664 | ||||
|                 $('#requests-override-options #request-method').hide(); | ||||
|                 $('#requests-override-options #request-body').hide(); | ||||
|  | ||||
|                 // @todo connect this one up | ||||
|                 $('#ignore-status-codes-option').hide(); | ||||
|             } else { | ||||
|                 // selenium/webdriver doesnt support anything afaik, hide it all | ||||
|                 $('#requests-override-options').hide(); | ||||
|             } | ||||
|  | ||||
|             $('#webdriver-override-options').show(); | ||||
|  | ||||
|         } else if ($('input[name="fetch_backend"]:checked').val() == 'system') { | ||||
|             $('#requests-override-options #request-method').hide(); | ||||
|             $('#requests-override-options #request-body').hide(); | ||||
|             $('#ignore-status-codes-option').hide(); | ||||
|             $('#requests-override-options').hide(); | ||||
|             $('#webdriver-override-options').hide(); | ||||
|         } else { | ||||
|  | ||||
|             $('#requests-override-options').show(); | ||||
|             $('#requests-override-options *:hidden').show(); | ||||
|             $('#webdriver-override-options').hide(); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     $('input[name="fetch_backend"]').click(function (e) { | ||||
|         toggle(); | ||||
|     }); | ||||
|     toggle(); | ||||
|  | ||||
|     $('#notification-setting-reset-to-default').click(function (e) { | ||||
|         $('#notification_title').val(''); | ||||
|         $('#notification_body').val(''); | ||||
|   | ||||
| @@ -126,6 +126,8 @@ html[data-darkmode="true"] { | ||||
|   html[data-darkmode="true"] .watch-table .title-col a[target="_blank"]::after, | ||||
|   html[data-darkmode="true"] .watch-table .current-diff-url::after { | ||||
|     filter: invert(0.5) hue-rotate(10deg) brightness(2); } | ||||
|   html[data-darkmode="true"] .watch-table .status-browsersteps { | ||||
|     filter: invert(0.5) hue-rotate(10deg) brightness(1.5); } | ||||
|   html[data-darkmode="true"] .watch-table .watch-controls .state-off img { | ||||
|     opacity: 0.3; } | ||||
|   html[data-darkmode="true"] .watch-table .watch-controls .state-on img { | ||||
|   | ||||
| @@ -152,6 +152,10 @@ html[data-darkmode="true"] { | ||||
|       filter: invert(.5) hue-rotate(10deg) brightness(2); | ||||
|     } | ||||
|  | ||||
|     .status-browsersteps { | ||||
|       filter: invert(.5) hue-rotate(10deg) brightness(1.5); | ||||
|     } | ||||
|  | ||||
|     .watch-controls { | ||||
|       .state-off { | ||||
|         img { | ||||
|   | ||||
| @@ -402,8 +402,24 @@ label { | ||||
|   } | ||||
|  | ||||
|   #watch-add-wrapper-zone { | ||||
|     >div { | ||||
|       display: inline-block; | ||||
|  | ||||
|     @media only screen and (min-width: 760px) { | ||||
|       display: flex; | ||||
|       gap: 0.3rem; | ||||
|       flex-direction: row; | ||||
|     } | ||||
|     /* URL field grows always, other stay static in width */ | ||||
|     > span { | ||||
|       flex-grow: 0; | ||||
|  | ||||
|       input { | ||||
|         width: 100%; | ||||
|         padding-right: 1em; | ||||
|       } | ||||
|  | ||||
|       &:first-child { | ||||
|         flex-grow: 1; | ||||
|       } | ||||
|     } | ||||
|  | ||||
|     @media only screen and (max-width: 760px) { | ||||
| @@ -944,10 +960,8 @@ ul { | ||||
|  | ||||
| @import "parts/_visualselector"; | ||||
|  | ||||
| #webdriver-override-options { | ||||
|   input[type="number"] { | ||||
| #webdriver_delay { | ||||
|     width: 5em; | ||||
|   } | ||||
| } | ||||
|  | ||||
| #api-key { | ||||
| @@ -1082,3 +1096,16 @@ ul { | ||||
|   white-space: nowrap; | ||||
| } | ||||
|  | ||||
| #chrome-extension-link { | ||||
|   img { | ||||
|     height: 21px; | ||||
|     padding: 2px; | ||||
|     vertical-align: middle; | ||||
|   } | ||||
|  | ||||
|   padding: 9px; | ||||
|   border: 1px solid var(--color-grey-800); | ||||
|   border-radius: 10px; | ||||
|   vertical-align: middle; | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -342,6 +342,8 @@ html[data-darkmode="true"] { | ||||
|   html[data-darkmode="true"] .watch-table .title-col a[target="_blank"]::after, | ||||
|   html[data-darkmode="true"] .watch-table .current-diff-url::after { | ||||
|     filter: invert(0.5) hue-rotate(10deg) brightness(2); } | ||||
|   html[data-darkmode="true"] .watch-table .status-browsersteps { | ||||
|     filter: invert(0.5) hue-rotate(10deg) brightness(1.5); } | ||||
|   html[data-darkmode="true"] .watch-table .watch-controls .state-off img { | ||||
|     opacity: 0.3; } | ||||
|   html[data-darkmode="true"] .watch-table .watch-controls .state-on img { | ||||
| @@ -683,11 +685,23 @@ label:hover { | ||||
|   #new-watch-form legend { | ||||
|     color: var(--color-text-legend); | ||||
|     font-weight: bold; } | ||||
|   #new-watch-form #watch-add-wrapper-zone > div { | ||||
|     display: inline-block; } | ||||
|   @media only screen and (max-width: 760px) { | ||||
|     #new-watch-form #watch-add-wrapper-zone #url { | ||||
|       width: 100%; } } | ||||
|   #new-watch-form #watch-add-wrapper-zone { | ||||
|     /* URL field grows always, other stay static in width */ } | ||||
|     @media only screen and (min-width: 760px) { | ||||
|       #new-watch-form #watch-add-wrapper-zone { | ||||
|         display: flex; | ||||
|         gap: 0.3rem; | ||||
|         flex-direction: row; } } | ||||
|     #new-watch-form #watch-add-wrapper-zone > span { | ||||
|       flex-grow: 0; } | ||||
|       #new-watch-form #watch-add-wrapper-zone > span input { | ||||
|         width: 100%; | ||||
|         padding-right: 1em; } | ||||
|       #new-watch-form #watch-add-wrapper-zone > span:first-child { | ||||
|         flex-grow: 1; } | ||||
|     @media only screen and (max-width: 760px) { | ||||
|       #new-watch-form #watch-add-wrapper-zone #url { | ||||
|         width: 100%; } } | ||||
|  | ||||
| #diff-col { | ||||
|   padding-left: 40px; } | ||||
| @@ -1065,7 +1079,7 @@ ul { | ||||
| #selector-current-xpath { | ||||
|   font-size: 80%; } | ||||
|  | ||||
| #webdriver-override-options input[type="number"] { | ||||
| #webdriver_delay { | ||||
|   width: 5em; } | ||||
|  | ||||
| #api-key:hover { | ||||
| @@ -1166,3 +1180,13 @@ ul { | ||||
|   .restock-label.not-in-stock { | ||||
|     background-color: var(--color-background-button-cancel); | ||||
|     color: #777; } | ||||
|  | ||||
| #chrome-extension-link { | ||||
|   padding: 9px; | ||||
|   border: 1px solid var(--color-grey-800); | ||||
|   border-radius: 10px; | ||||
|   vertical-align: middle; } | ||||
|   #chrome-extension-link img { | ||||
|     height: 21px; | ||||
|     padding: 2px; | ||||
|     vertical-align: middle; } | ||||
|   | ||||
| @@ -9,7 +9,6 @@ from copy import deepcopy, copy | ||||
| from os import path, unlink | ||||
| from threading import Lock | ||||
| import json | ||||
| import logging | ||||
| import os | ||||
| import re | ||||
| import requests | ||||
| @@ -17,6 +16,7 @@ import secrets | ||||
| import threading | ||||
| import time | ||||
| import uuid as uuid_builder | ||||
| from loguru import logger | ||||
|  | ||||
| # Because the server will run as a daemon and wont know the URL for notification links when firing off a notification | ||||
| BASE_URL_NOT_SET_TEXT = '("Base URL" not set - see settings - notifications)' | ||||
| @@ -42,7 +42,7 @@ class ChangeDetectionStore: | ||||
|         self.__data = App.model() | ||||
|         self.datastore_path = datastore_path | ||||
|         self.json_store_path = "{}/url-watches.json".format(self.datastore_path) | ||||
|         print(">>> Datastore path is ", self.json_store_path) | ||||
|         logger.info(f"Datastore path is '{self.json_store_path}'") | ||||
|         self.needs_write = False | ||||
|         self.start_time = time.time() | ||||
|         self.stop_thread = False | ||||
| @@ -83,12 +83,12 @@ class ChangeDetectionStore: | ||||
|                 for uuid, watch in self.__data['watching'].items(): | ||||
|                     watch['uuid']=uuid | ||||
|                     self.__data['watching'][uuid] = Watch.model(datastore_path=self.datastore_path, default=watch) | ||||
|                     print("Watching:", uuid, self.__data['watching'][uuid]['url']) | ||||
|                     logger.info(f"Watching: {uuid} {self.__data['watching'][uuid]['url']}") | ||||
|  | ||||
|         # First time ran, Create the datastore. | ||||
|         except (FileNotFoundError): | ||||
|             if include_default_watches: | ||||
|                 print("No JSON DB found at {}, creating JSON store at {}".format(self.json_store_path, self.datastore_path)) | ||||
|                 logger.critical(f"No JSON DB found at {self.json_store_path}, creating JSON store at {self.datastore_path}") | ||||
|                 self.add_watch(url='https://news.ycombinator.com/', | ||||
|                                tag='Tech news', | ||||
|                                extras={'fetch_backend': 'html_requests'}) | ||||
| @@ -139,7 +139,7 @@ class ChangeDetectionStore: | ||||
|         save_data_thread = threading.Thread(target=self.save_datastore).start() | ||||
|  | ||||
|     def set_last_viewed(self, uuid, timestamp): | ||||
|         logging.debug("Setting watch UUID: {} last viewed to {}".format(uuid, int(timestamp))) | ||||
|         logger.debug(f"Setting watch UUID: {uuid} last viewed to {int(timestamp)}") | ||||
|         self.data['watching'][uuid].update({'last_viewed': int(timestamp)}) | ||||
|         self.needs_write = True | ||||
|  | ||||
| @@ -234,7 +234,7 @@ class ChangeDetectionStore: | ||||
|  | ||||
|         # Probably their should be dict... | ||||
|         for watch in self.data['watching'].values(): | ||||
|             if watch['url'] == url: | ||||
|             if watch['url'].lower() == url.lower(): | ||||
|                 return True | ||||
|  | ||||
|         return False | ||||
| @@ -248,12 +248,14 @@ class ChangeDetectionStore: | ||||
|                 'check_count': 0, | ||||
|                 'fetch_time' : 0.0, | ||||
|                 'has_ldjson_price_data': None, | ||||
|                 'in_stock': None, | ||||
|                 'last_checked': 0, | ||||
|                 'last_error': False, | ||||
|                 'last_notification_error': False, | ||||
|                 'last_viewed': 0, | ||||
|                 'previous_md5': False, | ||||
|                 'previous_md5_before_filters': False, | ||||
|                 'remote_server_reply': None, | ||||
|                 'track_ldjson_price_data': None, | ||||
|             }) | ||||
|  | ||||
| @@ -315,7 +317,7 @@ class ChangeDetectionStore: | ||||
|                             apply_extras['include_filters'] = [res['css_filter']] | ||||
|  | ||||
|             except Exception as e: | ||||
|                 logging.error("Error fetching metadata for shared watch link", url, str(e)) | ||||
|                 logger.error(f"Error fetching metadata for shared watch link {url} {str(e)}") | ||||
|                 flash("Error fetching metadata for {}".format(url), 'error') | ||||
|                 return False | ||||
|         from .model.Watch import is_safe_url | ||||
| @@ -333,7 +335,8 @@ class ChangeDetectionStore: | ||||
|  | ||||
|         # Or if UUIDs given directly | ||||
|         if tag_uuids: | ||||
|             apply_extras['tags'] = list(set(apply_extras['tags'] + tag_uuids)) | ||||
|             for t in tag_uuids: | ||||
|                 apply_extras['tags'] = list(set(apply_extras['tags'] + [t.strip()])) | ||||
|  | ||||
|         # Make any uuids unique | ||||
|         if apply_extras.get('tags'): | ||||
| @@ -343,7 +346,7 @@ class ChangeDetectionStore: | ||||
|  | ||||
|         new_uuid = new_watch.get('uuid') | ||||
|  | ||||
|         logging.debug("Added URL {} - {}".format(url, new_uuid)) | ||||
|         logger.debug(f"Adding URL {url} - {new_uuid}") | ||||
|  | ||||
|         for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']: | ||||
|             if k in apply_extras: | ||||
| @@ -360,7 +363,7 @@ class ChangeDetectionStore: | ||||
|         if write_to_disk_now: | ||||
|             self.sync_to_json() | ||||
|  | ||||
|         print("added ", url) | ||||
|         logger.debug(f"Added '{url}'") | ||||
|  | ||||
|         return new_uuid | ||||
|  | ||||
| @@ -414,14 +417,13 @@ class ChangeDetectionStore: | ||||
|  | ||||
|  | ||||
|     def sync_to_json(self): | ||||
|         logging.info("Saving JSON..") | ||||
|         print("Saving JSON..") | ||||
|         logger.info("Saving JSON..") | ||||
|         try: | ||||
|             data = deepcopy(self.__data) | ||||
|         except RuntimeError as e: | ||||
|             # Try again in 15 seconds | ||||
|             time.sleep(15) | ||||
|             logging.error ("! Data changed when writing to JSON, trying again.. %s", str(e)) | ||||
|             logger.error(f"! Data changed when writing to JSON, trying again.. {str(e)}") | ||||
|             self.sync_to_json() | ||||
|             return | ||||
|         else: | ||||
| @@ -434,7 +436,7 @@ class ChangeDetectionStore: | ||||
|                     json.dump(data, json_file, indent=4) | ||||
|                 os.replace(self.json_store_path+".tmp", self.json_store_path) | ||||
|             except Exception as e: | ||||
|                 logging.error("Error writing JSON!! (Main JSON file save was skipped) : %s", str(e)) | ||||
|                 logger.error(f"Error writing JSON!! (Main JSON file save was skipped) : {str(e)}") | ||||
|  | ||||
|             self.needs_write = False | ||||
|             self.needs_write_urgent = False | ||||
| @@ -445,7 +447,16 @@ class ChangeDetectionStore: | ||||
|  | ||||
|         while True: | ||||
|             if self.stop_thread: | ||||
|                 print("Shutting down datastore thread") | ||||
|                 # Suppressing "Logging error in Loguru Handler #0" during CICD. | ||||
|                 # Not a meaningful difference for a real use-case just for CICD. | ||||
|                 # the side effect is a "Shutting down datastore thread" message | ||||
|                 # at the end of each test. | ||||
|                 # But still more looking better. | ||||
|                 import sys | ||||
|                 logger.remove() | ||||
|                 logger.add(sys.stderr) | ||||
|  | ||||
|                 logger.critical("Shutting down datastore thread") | ||||
|                 return | ||||
|  | ||||
|             if self.needs_write or self.needs_write_urgent: | ||||
| @@ -461,7 +472,7 @@ class ChangeDetectionStore: | ||||
|     # Go through the datastore path and remove any snapshots that are not mentioned in the index | ||||
|     # This usually is not used, but can be handy. | ||||
|     def remove_unused_snapshots(self): | ||||
|         print ("Removing snapshots from datastore that are not in the index..") | ||||
|         logger.info("Removing snapshots from datastore that are not in the index..") | ||||
|  | ||||
|         index=[] | ||||
|         for uuid in self.data['watching']: | ||||
| @@ -474,7 +485,7 @@ class ChangeDetectionStore: | ||||
|         for uuid in self.data['watching']: | ||||
|             for item in pathlib.Path(self.datastore_path).rglob(uuid+"/*.txt"): | ||||
|                 if not str(item) in index: | ||||
|                     print ("Removing",item) | ||||
|                     logger.info(f"Removing {item}") | ||||
|                     unlink(item) | ||||
|  | ||||
|     @property | ||||
| @@ -560,7 +571,7 @@ class ChangeDetectionStore: | ||||
|             if os.path.isfile(filepath): | ||||
|                 headers.update(parse_headers_from_text_file(filepath)) | ||||
|         except Exception as e: | ||||
|             print(f"ERROR reading headers.txt at {filepath}", str(e)) | ||||
|             logger.error(f"ERROR reading headers.txt at {filepath} {str(e)}") | ||||
|  | ||||
|         watch = self.data['watching'].get(uuid) | ||||
|         if watch: | ||||
| @@ -571,7 +582,7 @@ class ChangeDetectionStore: | ||||
|                 if os.path.isfile(filepath): | ||||
|                     headers.update(parse_headers_from_text_file(filepath)) | ||||
|             except Exception as e: | ||||
|                 print(f"ERROR reading headers.txt at {filepath}", str(e)) | ||||
|                 logger.error(f"ERROR reading headers.txt at {filepath} {str(e)}") | ||||
|  | ||||
|             # In /datastore/tag-name.txt | ||||
|             tags = self.get_all_tags_for_watch(uuid=uuid) | ||||
| @@ -582,7 +593,7 @@ class ChangeDetectionStore: | ||||
|                     if os.path.isfile(filepath): | ||||
|                         headers.update(parse_headers_from_text_file(filepath)) | ||||
|                 except Exception as e: | ||||
|                     print(f"ERROR reading headers.txt at {filepath}", str(e)) | ||||
|                     logger.error(f"ERROR reading headers.txt at {filepath} {str(e)}") | ||||
|  | ||||
|         return headers | ||||
|  | ||||
| @@ -600,13 +611,13 @@ class ChangeDetectionStore: | ||||
|     def add_tag(self, name): | ||||
|         # If name exists, return that | ||||
|         n = name.strip().lower() | ||||
|         print (f">>> Adding new tag - '{n}'") | ||||
|         logger.debug(f">>> Adding new tag - '{n}'") | ||||
|         if not n: | ||||
|             return False | ||||
|  | ||||
|         for uuid, tag in self.__data['settings']['application'].get('tags', {}).items(): | ||||
|             if n == tag.get('title', '').lower().strip(): | ||||
|                 print (f">>> Tag {name} already exists") | ||||
|                 logger.warning(f"Tag '{name}' already exists, skipping creation.") | ||||
|                 return uuid | ||||
|  | ||||
|         # Eventually almost everything todo with a watch will apply as a Tag | ||||
| @@ -668,7 +679,7 @@ class ChangeDetectionStore: | ||||
|         updates_available = self.get_updates_available() | ||||
|         for update_n in updates_available: | ||||
|             if update_n > self.__data['settings']['application']['schema_version']: | ||||
|                 print ("Applying update_{}".format((update_n))) | ||||
|                 logger.critical(f"Applying update_{update_n}") | ||||
|                 # Wont exist on fresh installs | ||||
|                 if os.path.exists(self.json_store_path): | ||||
|                     shutil.copyfile(self.json_store_path, self.datastore_path+"/url-watches-before-{}.json".format(update_n)) | ||||
| @@ -676,8 +687,8 @@ class ChangeDetectionStore: | ||||
|                 try: | ||||
|                     update_method = getattr(self, "update_{}".format(update_n))() | ||||
|                 except Exception as e: | ||||
|                     print("Error while trying update_{}".format((update_n))) | ||||
|                     print(e) | ||||
|                     logger.error(f"Error while trying update_{update_n}") | ||||
|                     logger.error(e) | ||||
|                     # Don't run any more updates | ||||
|                     return | ||||
|                 else: | ||||
| @@ -715,7 +726,7 @@ class ChangeDetectionStore: | ||||
|                         with open(os.path.join(target_path, "history.txt"), "w") as f: | ||||
|                             f.writelines(history) | ||||
|                     else: | ||||
|                         logging.warning("Datastore history directory {} does not exist, skipping history import.".format(target_path)) | ||||
|                         logger.warning(f"Datastore history directory {target_path} does not exist, skipping history import.") | ||||
|  | ||||
|                 # No longer needed, dynamically pulled from the disk when needed. | ||||
|                 # But we should set it back to a empty dict so we don't break if this schema runs on an earlier version. | ||||
|   | ||||
| @@ -13,10 +13,10 @@ | ||||
|                             <div class="pure-form-message-inline"> | ||||
|                               <ul> | ||||
|                                 <li>Use <a target=_new href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.</li> | ||||
|                                 <li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_discord">discord://</a></code> (or <code>https://discord.com/api/webhooks...</code>)) </code> only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li> | ||||
|                                 <li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_discord">discord://</a></code> (or <code>https://discord.com/api/webhooks...</code>)) only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li> | ||||
|                                 <li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> bots can't send messages to other bots, so you should specify chat ID of non-bot user.</li> | ||||
|                                 <li><code><a target=_new href="https://github.com/caronc/apprise/wiki/Notify_telegram">tgram://</a></code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li> | ||||
|                                 <li><code>gets://</code>, <code>posts://</code>, <code>puts://</code>, <code>deletes://</code> for direct API calls (or omit the "<code>s</code>" for non-SSL ie <code>get://</code>)</li> | ||||
|                                 <li><code>gets://</code>, <code>posts://</code>, <code>puts://</code>, <code>deletes://</code> for direct API calls (or omit the "<code>s</code>" for non-SSL ie <code>get://</code>) <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes#postposts">more help here</a></li> | ||||
|                                   <li>Accepts the <code>{{ '{{token}}' }}</code> placeholders listed below</li> | ||||
|                               </ul> | ||||
|                             </div> | ||||
| @@ -115,6 +115,12 @@ | ||||
| 									Warning: Contents of <code>{{ '{{diff}}' }}</code>, <code>{{ '{{diff_removed}}' }}</code>, and <code>{{ '{{diff_added}}' }}</code> depend on how the difference algorithm perceives the change. <br> | ||||
|                                     For example, an addition or removal could be perceived as a change in some cases. <a target="_new" href="https://github.com/dgtlmoon/changedetection.io/wiki/Using-the-%7B%7Bdiff%7D%7D,-%7B%7Bdiff_added%7D%7D,-and-%7B%7Bdiff_removed%7D%7D-notification-tokens">More Here</a> <br> | ||||
|                                     </p> | ||||
|                                     <p> | ||||
|                                         For JSON payloads, use <strong>|tojson</strong> without quotes for automatic escaping, for example - <code>{ "name": {{ '{{ watch_title|tojson }}' }} }</code> | ||||
|                                     </p> | ||||
|                                     <p> | ||||
|                                         URL encoding, use <strong>|urlencode</strong>, for example - <code>gets://hook-website.com/test.php?title={{ '{{ watch_title|urlencode }}' }}</code> | ||||
|                                     </p> | ||||
|                                 </div> | ||||
|                             </div> | ||||
|                             <div class="pure-control-group"> | ||||
|   | ||||
| @@ -39,6 +39,24 @@ | ||||
| {% endmacro %} | ||||
|  | ||||
|  | ||||
| {% macro render_nolabel_field(field) %} | ||||
|     <span> | ||||
|     {{ field(**kwargs)|safe }} | ||||
|         {% if field.errors %} | ||||
|             <span class="error"> | ||||
|       {% if field.errors %} | ||||
|           <ul class=errors> | ||||
|         {% for error in field.errors %} | ||||
|             <li>{{ error }}</li> | ||||
|         {% endfor %} | ||||
|         </ul> | ||||
|       {% endif %} | ||||
|       </span> | ||||
|         {% endif %} | ||||
|     </span> | ||||
| {% endmacro %} | ||||
|  | ||||
|  | ||||
| {% macro render_button(field) %} | ||||
|   {{ field(**kwargs)|safe }} | ||||
| {% endmacro %} | ||||
| @@ -116,7 +116,7 @@ | ||||
|                    viewBox="0 0 16.9 16.1" | ||||
|                    id="svg-heart" | ||||
|                    xmlns="http://www.w3.org/2000/svg" | ||||
|                    xmlns:svg="http://www.w3.org/2000/svg"> | ||||
|                    > | ||||
|                   <path id="heartpath" d="M 5.338316,0.50302766 C 0.71136983,0.50647126 -3.9576371,7.2707777 8.5004254,15.503028 23.833425,5.3700277 13.220206,-2.5384409 8.6762066,1.6475589 c -0.060791,0.054322 -0.11943,0.1110064 -0.1757812,0.1699219 -0.057,-0.059 -0.1157813,-0.116875 -0.1757812,-0.171875 C 7.4724566,0.86129334 6.4060729,0.50223298 5.338316,0.50302766 Z" | ||||
|                      style="fill:var(--color-background);fill-opacity:1;stroke:#ff0000;stroke-opacity:1" /> | ||||
|                 </svg> | ||||
| @@ -147,7 +147,19 @@ | ||||
|     <section class="content"> | ||||
|         <div id="overlay"> | ||||
|             <div class="content"> | ||||
|                 <strong>changedetection.io needs your support!</strong><br> | ||||
|                 <h4>Try our Chrome extension</h4> | ||||
|                 <p> | ||||
|                     <a id="chrome-extension-link" | ||||
|                        title="Try our new Chrome Extension!" | ||||
|                        href="https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop"> | ||||
|                         <img src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}"> | ||||
|                         Chrome Webstore | ||||
|                     </a> | ||||
|                 </p> | ||||
|  | ||||
|                 Easily add the current web-page from your browser directly into your changedetection.io tool, more great features coming soon! | ||||
|  | ||||
|                 <h4>Changedetection.io needs your support!</h4> | ||||
|                 <p> | ||||
|                     You can help us by supporting changedetection.io on these platforms; | ||||
|                 </p> | ||||
| @@ -170,7 +182,6 @@ | ||||
|                     And tell your friends and colleagues :) | ||||
|                 </li> | ||||
|                 </ul> | ||||
|                 </p> | ||||
|                 <p> | ||||
|                     The more popular changedetection.io is, the more time we can dedicate to adding amazing features! | ||||
|                 </p> | ||||
|   | ||||
| @@ -13,7 +13,7 @@ | ||||
| <script src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script> | ||||
|  | ||||
| <div id="settings"> | ||||
|     <form class="pure-form " action="" method="GET"> | ||||
|     <form class="pure-form " action="" method="GET" id="diff-form"> | ||||
|         <fieldset> | ||||
|             {% if versions|length >= 1 %} | ||||
|                 <strong>Compare</strong> | ||||
|   | ||||
| @@ -3,6 +3,7 @@ | ||||
| {% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %} | ||||
| {% from '_common_fields.jinja' import render_common_settings_form %} | ||||
| <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script> | ||||
| <script> | ||||
|     const browser_steps_available_screenshots=JSON.parse('{{ watch.get_browsersteps_available_screenshots|tojson }}'); | ||||
|     const browser_steps_config=JSON.parse('{{ browser_steps_config|tojson }}'); | ||||
| @@ -13,13 +14,13 @@ | ||||
| {% if emailprefix %} | ||||
|     const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}'); | ||||
| {% endif %} | ||||
|     const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}"; | ||||
|     const notification_base_url="{{url_for('ajax_callback_send_notification_test', watch_uuid=uuid)}}"; | ||||
|     const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %}; | ||||
|     const recheck_proxy_start_url="{{url_for('check_proxies.start_check', uuid=uuid)}}"; | ||||
|     const proxy_recheck_status_url="{{url_for('check_proxies.get_recheck_status', uuid=uuid)}}"; | ||||
|     const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}"; | ||||
|     const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}"; | ||||
|  | ||||
|     const default_system_fetch_backend="{{ settings_application['fetch_backend'] }}"; | ||||
| </script> | ||||
|  | ||||
| <script src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script> | ||||
| @@ -124,10 +125,9 @@ | ||||
|                         </span> | ||||
|                     </div> | ||||
|                 {% endif %} | ||||
|                     <div  class="pure-control-group inline-radio"> | ||||
|                         {{ render_checkbox_field(form.ignore_status_codes) }} | ||||
|                     </div> | ||||
|                 <fieldset id="webdriver-override-options"> | ||||
|  | ||||
|                 <!-- webdriver always --> | ||||
|                 <fieldset data-visible-for="fetch_backend=html_webdriver"  style="display: none;"> | ||||
|                     <div class="pure-control-group"> | ||||
|                         {{ render_field(form.webdriver_delay) }} | ||||
|                         <div class="pure-form-message-inline"> | ||||
| @@ -140,23 +140,40 @@ | ||||
|                         </div> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group"> | ||||
|                         <a class="pure-button button-secondary button-xsmall show-advanced">Show advanced options</a> | ||||
|                     </div> | ||||
|                     <div class="advanced-options"  style="display: none;"> | ||||
|                         {{ render_field(form.webdriver_js_execute_code) }} | ||||
|                         <div class="pure-form-message-inline"> | ||||
|                             Run this code before performing change detection, handy for filling in fields and other actions <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Run-JavaScript-before-change-detection">More help and examples here</a> | ||||
|                             Run this code before performing change detection, handy for filling in fields and other | ||||
|                             actions <a | ||||
|                                 href="https://github.com/dgtlmoon/changedetection.io/wiki/Run-JavaScript-before-change-detection">More | ||||
|                             help and examples here</a> | ||||
|                         </div> | ||||
|                     </div> | ||||
|                 </fieldset> | ||||
|                 <fieldset class="pure-group" id="requests-override-options"> | ||||
|                     {% if not playwright_enabled %} | ||||
|                         <div class="pure-form-message-inline"> | ||||
|                             <strong>Request override is currently only used by the <i>Basic fast Plaintext/HTTP Client</i> method.</strong> | ||||
|                         </div> | ||||
|                     {% endif %} | ||||
|                     <div class="pure-control-group" id="request-method"> | ||||
|                         {{ render_field(form.method) }} | ||||
|                 <!-- html requests always --> | ||||
|                 <fieldset data-visible-for="fetch_backend=html_requests"> | ||||
|                     <div class="pure-control-group"> | ||||
|                         <a class="pure-button button-secondary button-xsmall show-advanced">Show advanced options</a> | ||||
|                     </div> | ||||
|                     <div class="pure-control-group" id="request-headers"> | ||||
| {{ render_field(form.headers, rows=5, placeholder="Example | ||||
|                     <div class="advanced-options"  style="display: none;"> | ||||
|                         <div class="pure-control-group" id="request-method"> | ||||
|                             {{ render_field(form.method) }} | ||||
|                         </div> | ||||
|                         <div id="request-body"> | ||||
|                                             {{ render_field(form.body, rows=5, placeholder="Example | ||||
| { | ||||
|    \"name\":\"John\", | ||||
|    \"age\":30, | ||||
|    \"car\":null | ||||
| }") }} | ||||
|                         </div> | ||||
|                     </div> | ||||
|                 </fieldset> | ||||
|             <!-- hmm --> | ||||
|                 <div class="pure-control-group advanced-options"  style="display: none;"> | ||||
|                     {{ render_field(form.headers, rows=5, placeholder="Example | ||||
| Cookie: foobar | ||||
| User-Agent: wonderbra 1.0") }} | ||||
|  | ||||
| @@ -169,17 +186,12 @@ User-Agent: wonderbra 1.0") }} | ||||
|                             <br> | ||||
|                             (Not supported by Selenium browser) | ||||
|                         </div> | ||||
|  | ||||
|                     </div> | ||||
|                     <div class="pure-control-group" id="request-body"> | ||||
|                                         {{ render_field(form.body, rows=5, placeholder="Example | ||||
| { | ||||
|    \"name\":\"John\", | ||||
|    \"age\":30, | ||||
|    \"car\":null | ||||
| }") }} | ||||
|             <fieldset data-visible-for="fetch_backend=html_requests fetch_backend=html_webdriver" > | ||||
|                     <div class="pure-control-group inline-radio advanced-options"  style="display: none;"> | ||||
|                     {{ render_checkbox_field(form.ignore_status_codes) }} | ||||
|                     </div> | ||||
|                 </fieldset> | ||||
|             </fieldset> | ||||
|             </div> | ||||
|             {% if playwright_enabled %} | ||||
|             <div class="tab-pane-inner" id="browser-steps"> | ||||
| @@ -216,7 +228,7 @@ User-Agent: wonderbra 1.0") }} | ||||
|                                 </div> | ||||
|                             </div> | ||||
|                             <div id="browser-steps-fieldlist" style="padding-left: 1em;  width: 350px; font-size: 80%;" > | ||||
|                                 <span id="browserless-seconds-remaining">Loading</span> <span style="font-size: 80%;"> (<a target=_new href="https://github.com/dgtlmoon/changedetection.io/pull/478/files#diff-1a79d924d1840c485238e66772391268a89c95b781d69091384cf1ea1ac146c9R4">?</a>) </span> | ||||
|                                 <span id="browser-seconds-remaining">Loading</span> <span style="font-size: 80%;"> (<a target=_new href="https://github.com/dgtlmoon/changedetection.io/pull/478/files#diff-1a79d924d1840c485238e66772391268a89c95b781d69091384cf1ea1ac146c9R4">?</a>) </span> | ||||
|                                 {{ render_field(form.browser_steps) }} | ||||
|                             </div> | ||||
|                         </div> | ||||
| @@ -327,6 +339,10 @@ nav | ||||
|                     <span class="pure-form-message-inline">When content is merely moved in a list, it will also trigger an <strong>addition</strong>, consider enabling <code><strong>Only trigger when unique lines appear</strong></code></span> | ||||
|                 </fieldset> | ||||
|  | ||||
|                 <fieldset class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.sort_text_alphabetically) }} | ||||
|                     <span class="pure-form-message-inline">Helps reduce changes detected caused by sites shuffling lines around, combine with <i>check unique lines</i> below.</span> | ||||
|                 </fieldset> | ||||
|                 <fieldset class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.check_unique_lines) }} | ||||
|                     <span class="pure-form-message-inline">Good for websites that just move the content around, and you want to know when NEW content is added, compares new lines against all history for this watch.</span> | ||||
| @@ -389,6 +405,7 @@ Unavailable") }} | ||||
|                                 <li>Use <code>//(?aiLmsux))</code> type flags (more <a href="https://docs.python.org/3/library/re.html#index-15">information here</a>)<br></li> | ||||
|                                 <li>Keyword example ‐ example <code>Out of stock</code></li> | ||||
|                                 <li>Use groups to extract just that text ‐ example <code>/reports.+?(\d+)/i</code> returns a list of years only</li> | ||||
|                                 <li>Example - match lines containing a keyword <code>/.*icecream.*/</code></li> | ||||
|                             </ul> | ||||
|                         </li> | ||||
|                         <li>One line per regular-expression/string match</li> | ||||
| @@ -470,6 +487,10 @@ Unavailable") }} | ||||
|                             <td>Last fetch time</td> | ||||
|                             <td>{{ watch.fetch_time }}s</td> | ||||
|                         </tr> | ||||
|                         <tr> | ||||
|                             <td>Notification alert count</td> | ||||
|                             <td>{{ watch.notification_alert_count }}</td> | ||||
|                         </tr> | ||||
|                         </tbody> | ||||
|                     </table> | ||||
|                 </div> | ||||
|   | ||||
| @@ -107,7 +107,7 @@ | ||||
|                                     <option value="" style="color: #aaa"> -- none --</option> | ||||
|                                     <option value="url">URL</option> | ||||
|                                     <option value="title">Title</option> | ||||
|                                     <option value="include_filter">CSS/xPath filter</option> | ||||
|                                     <option value="include_filters">CSS/xPath filter</option> | ||||
|                                     <option value="tag">Group / Tag name(s)</option> | ||||
|                                     <option value="interval_minutes">Recheck time (minutes)</option> | ||||
|                                 </select></td> | ||||
|   | ||||
| @@ -4,14 +4,14 @@ | ||||
| {% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %} | ||||
| {% from '_common_fields.jinja' import render_common_settings_form %} | ||||
| <script> | ||||
|     const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}"; | ||||
|     const notification_base_url="{{url_for('ajax_callback_send_notification_test', watch_uuid=uuid)}}"; | ||||
| {% if emailprefix %} | ||||
|     const email_notification_prefix=JSON.parse('{{emailprefix|tojson}}'); | ||||
| {% endif %} | ||||
| </script> | ||||
| <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script> | ||||
|  | ||||
| <script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='global-settings.js')}}" defer></script> | ||||
| <div class="edit-form"> | ||||
|     <div class="tabs collapsable"> | ||||
| @@ -111,7 +111,7 @@ | ||||
|                     <br> | ||||
|                     Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using Bright Data and Oxylabs Proxies, find out more here.</a> | ||||
|                 </div> | ||||
|                 <fieldset class="pure-group" id="webdriver-override-options"> | ||||
|                 <fieldset class="pure-group" id="webdriver-override-options" data-visible-for="application-fetch_backend=html_webdriver"> | ||||
|                     <div class="pure-form-message-inline"> | ||||
|                         <strong>If you're having trouble waiting for the page to be fully rendered (text missing etc), try increasing the 'wait' time here.</strong> | ||||
|                         <br> | ||||
| @@ -168,12 +168,12 @@ nav | ||||
|            </div> | ||||
|  | ||||
|             <div class="tab-pane-inner" id="api"> | ||||
|  | ||||
|                 <h4>API Access</h4> | ||||
|                 <p>Drive your changedetection.io via API, More about <a href="https://github.com/dgtlmoon/changedetection.io/wiki/API-Reference">API access here</a></p> | ||||
|  | ||||
|                 <div class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.application.form.api_access_token_enabled) }} | ||||
|                     <div class="pure-form-message-inline">Restrict API access limit by using <code>x-api-key</code> header</div><br> | ||||
|                     <div class="pure-form-message-inline">Restrict API access limit by using <code>x-api-key</code> header - required for the Chrome Extension to work</div><br> | ||||
|                     <div class="pure-form-message-inline"><br>API Key <span id="api-key">{{api_key}}</span> | ||||
|                         <span style="display:none;" id="api-key-copy" >copy</span> | ||||
|                     </div> | ||||
| @@ -181,6 +181,20 @@ nav | ||||
|                 <div class="pure-control-group"> | ||||
|                     <a href="{{url_for('settings_reset_api_key')}}" class="pure-button button-small button-cancel">Regenerate API key</a> | ||||
|                 </div> | ||||
|                 <div class="pure-control-group"> | ||||
|                     <h4>Chrome Extension</h4> | ||||
|                     <p>Easily add any web-page to your changedetection.io installation from within Chrome.</p> | ||||
|                     <strong>Step 1</strong> Install the extension, <strong>Step 2</strong> Navigate to this page, | ||||
|                     <strong>Step 3</strong> Open the extension from the toolbar and click "<i>Sync API Access</i>" | ||||
|                     <p> | ||||
|                         <a id="chrome-extension-link" | ||||
|                            title="Try our new Chrome Extension!" | ||||
|                            href="https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop"> | ||||
|                             <img src="{{ url_for('static_content', group='images', filename='Google-Chrome-icon.png') }}"> | ||||
|                             Chrome Webstore | ||||
|                         </a> | ||||
|                     </p> | ||||
|                 </div> | ||||
|             </div> | ||||
|             <div class="tab-pane-inner" id="proxies"> | ||||
|                 <div id="recommended-proxy"> | ||||
| @@ -236,8 +250,11 @@ nav | ||||
|                 <span class="pure-form-message-inline">SOCKS5 proxies with authentication are only supported with 'plain requests' fetcher, for other fetchers you should whitelist the IP access instead</span> | ||||
|                 </div> | ||||
|                 <div class="pure-control-group" id="extra-browsers-setting"> | ||||
|                     <span class="pure-form-message-inline"><i>Extra Browsers</i> allow changedetection.io to communicate with a different web-browser.</span><br> | ||||
|                   {{ render_field(form.requests.form.extra_browsers) }} | ||||
|                     <p> | ||||
|                     <span class="pure-form-message-inline"><i>Extra Browsers</i> can be attached to further defeat CAPTCHA's on websites that are particularly hard to scrape.</span><br> | ||||
|                     <span class="pure-form-message-inline">Simply paste the connection address into the box, <a href="https://changedetection.io/tutorial/using-bright-datas-scraping-browser-pass-captchas-and-other-protection-when-monitoring">More instructions and examples here</a> </span> | ||||
|                     </p> | ||||
|                     {{ render_field(form.requests.form.extra_browsers) }} | ||||
|                 </div> | ||||
|             </div> | ||||
|             <div id="actions"> | ||||
|   | ||||
| @@ -1 +1 @@ | ||||
| <?xml version="1.0" encoding="utf-8"?><svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 122.879 119.799" enable-background="new 0 0 122.879 119.799" xml:space="preserve"><g><path d="M49.988,0h0.016v0.007C63.803,0.011,76.298,5.608,85.34,14.652c9.027,9.031,14.619,21.515,14.628,35.303h0.007v0.033v0.04 h-0.007c-0.005,5.557-0.917,10.905-2.594,15.892c-0.281,0.837-0.575,1.641-0.877,2.409v0.007c-1.446,3.66-3.315,7.12-5.547,10.307 l29.082,26.139l0.018,0.016l0.157,0.146l0.011,0.011c1.642,1.563,2.536,3.656,2.649,5.78c0.11,2.1-0.543,4.248-1.979,5.971 l-0.011,0.016l-0.175,0.203l-0.035,0.035l-0.146,0.16l-0.016,0.021c-1.565,1.642-3.654,2.534-5.78,2.646 c-2.097,0.111-4.247-0.54-5.971-1.978l-0.015-0.011l-0.204-0.175l-0.029-0.024L78.761,90.865c-0.88,0.62-1.778,1.209-2.687,1.765 c-1.233,0.755-2.51,1.466-3.813,2.115c-6.699,3.342-14.269,5.222-22.272,5.222v0.007h-0.016v-0.007 c-13.799-0.004-26.296-5.601-35.338-14.645C5.605,76.291,0.016,63.805,0.007,50.021H0v-0.033v-0.016h0.007 c0.004-13.799,5.601-26.296,14.645-35.338C23.683,5.608,36.167,0.016,49.955,0.007V0H49.988L49.988,0z M50.004,11.21v0.007h-0.016 h-0.033V11.21c-10.686,0.007-20.372,4.35-27.384,11.359C15.56,29.578,11.213,39.274,11.21,49.973h0.007v0.016v0.033H11.21 c0.007,10.686,4.347,20.367,11.359,27.381c7.009,7.012,16.705,11.359,27.403,11.361v-0.007h0.016h0.033v0.007 c10.686-0.007,20.368-4.348,27.382-11.359c7.011-7.009,11.358-16.702,11.36-27.4h-0.006v-0.016v-0.033h0.006 c-0.006-10.686-4.35-20.372-11.358-27.384C70.396,15.56,60.703,11.213,50.004,11.21L50.004,11.21z"/></g></svg> | ||||
| <svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 122.879 119.799" enable-background="new 0 0 122.879 119.799" xml:space="preserve"><g><path d="M49.988,0h0.016v0.007C63.803,0.011,76.298,5.608,85.34,14.652c9.027,9.031,14.619,21.515,14.628,35.303h0.007v0.033v0.04 h-0.007c-0.005,5.557-0.917,10.905-2.594,15.892c-0.281,0.837-0.575,1.641-0.877,2.409v0.007c-1.446,3.66-3.315,7.12-5.547,10.307 l29.082,26.139l0.018,0.016l0.157,0.146l0.011,0.011c1.642,1.563,2.536,3.656,2.649,5.78c0.11,2.1-0.543,4.248-1.979,5.971 l-0.011,0.016l-0.175,0.203l-0.035,0.035l-0.146,0.16l-0.016,0.021c-1.565,1.642-3.654,2.534-5.78,2.646 c-2.097,0.111-4.247-0.54-5.971-1.978l-0.015-0.011l-0.204-0.175l-0.029-0.024L78.761,90.865c-0.88,0.62-1.778,1.209-2.687,1.765 c-1.233,0.755-2.51,1.466-3.813,2.115c-6.699,3.342-14.269,5.222-22.272,5.222v0.007h-0.016v-0.007 c-13.799-0.004-26.296-5.601-35.338-14.645C5.605,76.291,0.016,63.805,0.007,50.021H0v-0.033v-0.016h0.007 c0.004-13.799,5.601-26.296,14.645-35.338C23.683,5.608,36.167,0.016,49.955,0.007V0H49.988L49.988,0z M50.004,11.21v0.007h-0.016 h-0.033V11.21c-10.686,0.007-20.372,4.35-27.384,11.359C15.56,29.578,11.213,39.274,11.21,49.973h0.007v0.016v0.033H11.21 c0.007,10.686,4.347,20.367,11.359,27.381c7.009,7.012,16.705,11.359,27.403,11.361v-0.007h0.016h0.033v0.007 c10.686-0.007,20.368-4.348,27.382-11.359c7.011-7.009,11.358-16.702,11.36-27.4h-0.006v-0.016v-0.033h0.006 c-0.006-10.686-4.35-20.372-11.358-27.384C70.396,15.56,60.703,11.213,50.004,11.21L50.004,11.21z"/></g></svg> | ||||
| Before Width: | Height: | Size: 1.6 KiB After Width: | Height: | Size: 1.5 KiB | 
| @@ -1,6 +1,6 @@ | ||||
| {% extends 'base.html' %} | ||||
| {% block content %} | ||||
| {% from '_helpers.jinja' import render_simple_field, render_field %} | ||||
| {% from '_helpers.jinja' import render_simple_field, render_field, render_nolabel_field, sort_by_title %} | ||||
| <script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script> | ||||
|  | ||||
| @@ -11,17 +11,14 @@ | ||||
|         <fieldset> | ||||
|             <legend>Add a new change detection watch</legend> | ||||
|             <div id="watch-add-wrapper-zone"> | ||||
|                 <div> | ||||
|                     {{ render_simple_field(form.url, placeholder="https://...", required=true) }} | ||||
|                     {{ render_simple_field(form.tags, value=tags[active_tag].title if active_tag else '', placeholder="watch label / tag") }} | ||||
|                 </div> | ||||
|                 <div> | ||||
|                     {{ render_simple_field(form.watch_submit_button, title="Watch this URL!" ) }} | ||||
|                     {{ render_simple_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }} | ||||
|                 </div> | ||||
|  | ||||
|                     {{ render_nolabel_field(form.url, placeholder="https://...", required=true) }} | ||||
|                     {{ render_nolabel_field(form.tags, value=active_tag.title if active_tag else '', placeholder="watch label / tag") }} | ||||
|                     {{ render_nolabel_field(form.watch_submit_button, title="Watch this URL!" ) }} | ||||
|                     {{ render_nolabel_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }} | ||||
|             </div> | ||||
|             <div id="quick-watch-processor-type"> | ||||
|                 {{ render_simple_field(form.processor, title="Edit first then Watch") }} | ||||
|                 {{ render_simple_field(form.processor) }} | ||||
|             </div> | ||||
|  | ||||
|         </fieldset> | ||||
| @@ -40,6 +37,7 @@ | ||||
|         <button class="pure-button button-secondary button-xsmall" name="op" value="assign-tag" id="checkbox-assign-tag">Tag</button> | ||||
|         <button class="pure-button button-secondary button-xsmall" name="op" value="mark-viewed">Mark viewed</button> | ||||
|         <button class="pure-button button-secondary button-xsmall" name="op" value="notification-default">Use default notification</button> | ||||
|         <button class="pure-button button-secondary button-xsmall" name="op" value="clear-errors">Clear errors</button> | ||||
|         <button class="pure-button button-secondary button-xsmall" style="background: #dd4242;" name="op" value="clear-history">Clear/reset history</button> | ||||
|         <button class="pure-button button-secondary button-xsmall" style="background: #dd4242;" name="op" value="delete">Delete</button> | ||||
|     </div> | ||||
| @@ -49,11 +47,13 @@ | ||||
|     {% if search_q %}<div id="search-result-info">Searching "<strong><i>{{search_q}}</i></strong>"</div>{% endif %} | ||||
|     <div> | ||||
|         <a href="{{url_for('index')}}" class="pure-button button-tag {{'active' if not active_tag }}">All</a> | ||||
|         {% for uuid, tag in tags.items() %} | ||||
|             {% if tag != "" %} | ||||
|                 <a href="{{url_for('index', tag=uuid) }}" class="pure-button button-tag {{'active' if active_tag == uuid }}">{{ tag.title }}</a> | ||||
|             {% endif %} | ||||
|         {% endfor %} | ||||
|  | ||||
|     <!-- tag list --> | ||||
|     {% for uuid, tag in tags %} | ||||
|         {% if tag != "" %} | ||||
|             <a href="{{url_for('index', tag=uuid) }}" class="pure-button button-tag {{'active' if active_tag_uuid == uuid }}">{{ tag.title }}</a> | ||||
|         {% endif %} | ||||
|     {% endfor %} | ||||
|     </div> | ||||
|  | ||||
|     {% set sort_order = sort_order or 'asc' %} | ||||
| @@ -82,12 +82,15 @@ | ||||
|             </tr> | ||||
|             {% endif %} | ||||
|             {% for watch in (watches|sort(attribute=sort_attribute, reverse=sort_order == 'asc'))|pagination_slice(skip=pagination.skip) %} | ||||
|  | ||||
|                 {% set is_unviewed =  watch.newest_history_key| int > watch.last_viewed and watch.history_n>=2 %} | ||||
|  | ||||
|             <tr id="{{ watch.uuid }}" | ||||
|                 class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }} processor-{{ watch['processor'] }} | ||||
|                 {% if watch.last_error is defined and watch.last_error != False %}error{% endif %} | ||||
|                 {% if watch.last_notification_error is defined and watch.last_notification_error != False %}error{% endif %} | ||||
|                 {% if watch.paused is defined and watch.paused != False %}paused{% endif %} | ||||
|                 {% if watch.newest_history_key| int > watch.last_viewed and watch.history_n>=2 %}unviewed{% endif %} | ||||
|                 {% if is_unviewed %}unviewed{% endif %} | ||||
|                 {% if watch.uuid in queued_uuids %}queued{% endif %}"> | ||||
|                 <td class="inline checkbox-uuid" ><input name="uuids"  type="checkbox" value="{{ watch.uuid}} " > <span>{{ loop.index+pagination.skip }}</span></td> | ||||
|                 <td class="inline watch-controls"> | ||||
| @@ -110,6 +113,7 @@ | ||||
|                     {% endif %} | ||||
|  | ||||
|                     {%if watch.is_pdf  %}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" title="Converting PDF to text" >{% endif %} | ||||
|                     {% if watch.has_browser_steps %}<img class="status-icon status-browsersteps" src="{{url_for('static_content', group='images', filename='steps.svg')}}" title="Browser Steps is enabled" >{% endif %} | ||||
|                     {% if watch.last_error is defined and watch.last_error != False %} | ||||
|                     <div class="fetch-error">{{ watch.last_error }} | ||||
|  | ||||
| @@ -141,7 +145,7 @@ | ||||
|                     {% if watch['processor'] == 'restock_diff'  %} | ||||
|                     <span class="restock-label {{'in-stock' if watch['in_stock'] else 'not-in-stock' }}" title="detecting restock conditions"> | ||||
|                         <!-- maybe some object watch['processor'][restock_diff] or.. --> | ||||
|                         {% if watch['last_checked'] %} | ||||
|                         {% if watch['last_checked'] and watch['in_stock'] != None %} | ||||
|                             {% if watch['in_stock'] %} In stock {% else %} Not in stock {% endif %} | ||||
|                         {% else %} | ||||
|                             Not yet checked | ||||
| @@ -167,7 +171,13 @@ | ||||
|                        class="recheck pure-button pure-button-primary">{% if watch.uuid in queued_uuids %}Queued{% else %}Recheck{% endif %}</a> | ||||
|                     <a href="{{ url_for('edit_page', uuid=watch.uuid)}}" class="pure-button pure-button-primary">Edit</a> | ||||
|                     {% if watch.history_n >= 2 %} | ||||
|                     <a href="{{ url_for('diff_history_page', uuid=watch.uuid) }}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a> | ||||
|  | ||||
|                         {%  if is_unviewed %} | ||||
|                            <a href="{{ url_for('diff_history_page', uuid=watch.uuid, from_version=watch.get_next_snapshot_key_to_last_viewed) }}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a> | ||||
|                         {% else %} | ||||
|                            <a href="{{ url_for('diff_history_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a> | ||||
|                         {% endif %} | ||||
|  | ||||
|                     {% else %} | ||||
|                         {% if watch.history_n == 1 or (watch.history_n ==0 and watch.error_text_ctime )%} | ||||
|                             <a href="{{ url_for('preview_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary">Preview</a> | ||||
| @@ -190,8 +200,8 @@ | ||||
|             </li> | ||||
|             {% endif %} | ||||
|             <li> | ||||
|                <a href="{{ url_for('form_watch_checknow', tag=active_tag, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Recheck | ||||
|                 all {% if active_tag%} in "{{tags[active_tag].title}}"{%endif%}</a> | ||||
|                <a href="{{ url_for('form_watch_checknow', tag=active_tag_uuid, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Recheck | ||||
|                 all {% if active_tag_uuid %} in "{{active_tag.title}}"{%endif%}</a> | ||||
|             </li> | ||||
|             <li> | ||||
|                 <a href="{{ url_for('rss', tag=active_tag , token=app_rss_token)}}"><img alt="RSS Feed" id="feed-icon" src="{{url_for('static_content', group='images', filename='Generic_Feed-icon.svg')}}" height="15"></a> | ||||
|   | ||||
| @@ -4,6 +4,8 @@ import pytest | ||||
| from changedetectionio import changedetection_app | ||||
| from changedetectionio import store | ||||
| import os | ||||
| import sys | ||||
| from loguru import logger | ||||
|  | ||||
| # https://github.com/pallets/flask/blob/1.1.2/examples/tutorial/tests/test_auth.py | ||||
| # Much better boilerplate than the docs | ||||
| @@ -11,24 +13,28 @@ import os | ||||
|  | ||||
| global app | ||||
|  | ||||
| # https://loguru.readthedocs.io/en/latest/resources/migration.html#replacing-caplog-fixture-from-pytest-library | ||||
| # Show loguru logs only if CICD pytest fails. | ||||
| from loguru import logger | ||||
| @pytest.fixture | ||||
| def reportlog(pytestconfig): | ||||
|     logging_plugin = pytestconfig.pluginmanager.getplugin("logging-plugin") | ||||
|     handler_id = logger.add(logging_plugin.report_handler, format="{message}") | ||||
|     yield | ||||
|     logger.remove(handler_id) | ||||
|  | ||||
| def cleanup(datastore_path): | ||||
|     import glob | ||||
|     # Unlink test output files | ||||
|     files = [ | ||||
|         'count.txt', | ||||
|         'endpoint-content.txt' | ||||
|         'headers.txt', | ||||
|         'headers-testtag.txt', | ||||
|         'notification.txt', | ||||
|         'secret.txt', | ||||
|         'url-watches.json', | ||||
|         'output.txt', | ||||
|     ] | ||||
|     for file in files: | ||||
|         try: | ||||
|             os.unlink("{}/{}".format(datastore_path, file)) | ||||
|         except FileNotFoundError: | ||||
|             pass | ||||
|  | ||||
|     for g in ["*.txt", "*.json", "*.pdf"]: | ||||
|         files = glob.glob(os.path.join(datastore_path, g)) | ||||
|         for f in files: | ||||
|             if 'proxies.json' in f: | ||||
|                 # Usually mounted by docker container during test time | ||||
|                 continue | ||||
|             if os.path.isfile(f): | ||||
|                 os.unlink(f) | ||||
|  | ||||
| @pytest.fixture(scope='session') | ||||
| def app(request): | ||||
| @@ -46,6 +52,18 @@ def app(request): | ||||
|  | ||||
|     app_config = {'datastore_path': datastore_path, 'disable_checkver' : True} | ||||
|     cleanup(app_config['datastore_path']) | ||||
|  | ||||
|     logger_level = 'TRACE' | ||||
|  | ||||
|     logger.remove() | ||||
|     log_level_for_stdout = { 'DEBUG', 'SUCCESS' } | ||||
|     logger.configure(handlers=[ | ||||
|         {"sink": sys.stdout, "level": logger_level, | ||||
|          "filter" : lambda record: record['level'].name in log_level_for_stdout}, | ||||
|         {"sink": sys.stderr, "level": logger_level, | ||||
|          "filter": lambda record: record['level'].name not in log_level_for_stdout}, | ||||
|         ]) | ||||
|  | ||||
|     datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], include_default_watches=False) | ||||
|     app = changedetection_app(app_config, datastore) | ||||
|  | ||||
|   | ||||
| @@ -7,10 +7,11 @@ from ..util import live_server_setup, wait_for_all_checks | ||||
| def do_test(client, live_server, make_test_use_extra_browser=False): | ||||
|  | ||||
|     # Grep for this string in the logs? | ||||
|     test_url = f"https://changedetection.io/ci-test.html" | ||||
|     test_url = f"https://changedetection.io/ci-test.html?non-custom-default=true" | ||||
|     # "non-custom-default" should not appear in the custom browser connection | ||||
|     custom_browser_name = 'custom browser URL' | ||||
|  | ||||
|     # needs to be set and something like 'ws://127.0.0.1:3000?stealth=1&--disable-web-security=true' | ||||
|     # needs to be set and something like 'ws://127.0.0.1:3000' | ||||
|     assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test" | ||||
|  | ||||
|     ##################### | ||||
| @@ -19,9 +20,7 @@ def do_test(client, live_server, make_test_use_extra_browser=False): | ||||
|         data={"application-empty_pages_are_a_change": "", | ||||
|               "requests-time_between_check-minutes": 180, | ||||
|               'application-fetch_backend': "html_webdriver", | ||||
|               # browserless-custom-url is setup in  .github/workflows/test-only.yml | ||||
|               # the test script run_custom_browser_url_test.sh will look for 'custom-browser-search-string' in the container logs | ||||
|               'requests-extra_browsers-0-browser_connection_url': 'ws://browserless-custom-url:3000?stealth=1&--disable-web-security=true&custom-browser-search-string=1', | ||||
|               'requests-extra_browsers-0-browser_connection_url': 'ws://sockpuppetbrowser-custom-url:3000', | ||||
|               'requests-extra_browsers-0-browser_name': custom_browser_name | ||||
|               }, | ||||
|         follow_redirects=True | ||||
| @@ -51,7 +50,8 @@ def do_test(client, live_server, make_test_use_extra_browser=False): | ||||
|         res = client.post( | ||||
|             url_for("edit_page", uuid="first"), | ||||
|             data={ | ||||
|                   "url": test_url, | ||||
|                 # 'run_customer_browser_url_tests.sh' will search for this string to know if we hit the right browser container or not | ||||
|                   "url": f"https://changedetection.io/ci-test.html?custom-browser-search-string=1", | ||||
|                   "tags": "", | ||||
|                   "headers": "", | ||||
|                   'fetch_backend': f"extra_browser_{custom_browser_name}", | ||||
|   | ||||
| @@ -37,4 +37,4 @@ def test_fetch_webdriver_content(client, live_server): | ||||
|     ) | ||||
|     logging.getLogger().info("Looking for correct fetched HTML (text) from server") | ||||
|  | ||||
|     assert b'cool it works' in res.data | ||||
|     assert b'cool it works' in res.data | ||||
|   | ||||
| @@ -0,0 +1,56 @@ | ||||
| import os | ||||
| from flask import url_for | ||||
| from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client | ||||
|  | ||||
|  | ||||
| def test_execute_custom_js(client, live_server): | ||||
|  | ||||
|     live_server_setup(live_server) | ||||
|     assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test" | ||||
|  | ||||
|     test_url = url_for('test_interactive_html_endpoint', _external=True) | ||||
|     test_url = test_url.replace('localhost.localdomain', 'cdio') | ||||
|     test_url = test_url.replace('localhost', 'cdio') | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("form_quick_watch_add"), | ||||
|         data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b"Watch added in Paused state, saving will unpause" in res.data | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid="first", unpause_on_save=1), | ||||
|         data={ | ||||
|             "url": test_url, | ||||
|             "tags": "", | ||||
|             'fetch_backend': "html_webdriver", | ||||
|             'webdriver_js_execute_code': 'document.querySelector("button[name=test-button]").click();', | ||||
|             'headers': "testheader: yes\buser-agent: MyCustomAgent", | ||||
|         }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"unpaused" in res.data | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     uuid = extract_UUID_from_client(client) | ||||
|     assert live_server.app.config['DATASTORE'].data['watching'][uuid].history_n >= 1, "Watch history had atleast 1 (everything fetched OK)" | ||||
|  | ||||
|     assert b"This text should be removed" not in res.data | ||||
|  | ||||
|     # Check HTML conversion detected and workd | ||||
|     res = client.get( | ||||
|         url_for("preview_page", uuid=uuid), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"This text should be removed" not in res.data | ||||
|     assert b"I smell JavaScript because the button was pressed" in res.data | ||||
|  | ||||
|     assert b"testheader: yes" in res.data | ||||
|     assert b"user-agent: mycustomagent" in res.data | ||||
|  | ||||
|     client.get( | ||||
|         url_for("form_delete", uuid="all"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
| @@ -1,6 +1,6 @@ | ||||
| #!/usr/bin/python3 | ||||
|  | ||||
| import time | ||||
| import os | ||||
| from flask import url_for | ||||
| from ..util import live_server_setup, wait_for_all_checks | ||||
|  | ||||
| @@ -9,22 +9,20 @@ def test_preferred_proxy(client, live_server): | ||||
|     live_server_setup(live_server) | ||||
|     url = "http://chosen.changedetection.io" | ||||
|  | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("import_page"), | ||||
|         # Because a URL wont show in squid/proxy logs due it being SSLed | ||||
|         # Use plain HTTP or a specific domain-name here | ||||
|         data={"urls": url}, | ||||
|         url_for("form_quick_watch_add"), | ||||
|         data={"url": url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b"1 Imported" in res.data | ||||
|     assert b"Watch added in Paused state, saving will unpause" in res.data | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid="first"), | ||||
|         url_for("edit_page", uuid="first", unpause_on_save=1), | ||||
|         data={ | ||||
|                 "include_filters": "", | ||||
|                 "fetch_backend": "html_requests", | ||||
|                 "fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests', | ||||
|                 "headers": "", | ||||
|                 "proxy": "proxy-two", | ||||
|                 "tags": "", | ||||
| @@ -32,6 +30,6 @@ def test_preferred_proxy(client, live_server): | ||||
|               }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|     assert b"unpaused" in res.data | ||||
|     wait_for_all_checks(client) | ||||
|     # Now the request should appear in the second-squid logs | ||||
|   | ||||
| @@ -3,6 +3,7 @@ | ||||
| import time | ||||
| from flask import url_for | ||||
| from ..util import live_server_setup, wait_for_all_checks | ||||
| import os | ||||
|  | ||||
| # just make a request, we will grep in the docker logs to see it actually got called | ||||
| def test_select_custom(client, live_server): | ||||
| @@ -14,7 +15,7 @@ def test_select_custom(client, live_server): | ||||
|         data={ | ||||
|             "requests-time_between_check-minutes": 180, | ||||
|             "application-ignore_whitespace": "y", | ||||
|             "application-fetch_backend": "html_requests", | ||||
|             "application-fetch_backend": 'html_webdriver' if os.getenv('PLAYWRIGHT_DRIVER_URL') else 'html_requests', | ||||
|             "requests-extra_proxies-0-proxy_name": "custom-test-proxy", | ||||
|             # test:awesome is set in tests/proxy_list/squid-passwords.txt | ||||
|             "requests-extra_proxies-0-proxy_url": "http://test:awesome@squid-custom:3128", | ||||
|   | ||||
| @@ -95,7 +95,7 @@ def test_restock_detection(client, live_server): | ||||
|  | ||||
|     # We should have a notification | ||||
|     time.sleep(2) | ||||
|     assert os.path.isfile("test-datastore/notification.txt") | ||||
|     assert os.path.isfile("test-datastore/notification.txt"), "Notification received" | ||||
|     os.unlink("test-datastore/notification.txt") | ||||
|  | ||||
|     # Default behaviour is to only fire notification when it goes OUT OF STOCK -> IN STOCK | ||||
| @@ -103,4 +103,9 @@ def test_restock_detection(client, live_server): | ||||
|     set_original_response() | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     wait_for_all_checks(client) | ||||
|     assert not os.path.isfile("test-datastore/notification.txt") | ||||
|     assert not os.path.isfile("test-datastore/notification.txt"), "No notification should have fired when it went OUT OF STOCK by default" | ||||
|  | ||||
|     # BUT we should see that it correctly shows "not in stock" | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b'not-in-stock' in res.data, "Correctly showing NOT IN STOCK in the list after it changed from IN STOCK" | ||||
|  | ||||
|   | ||||
| @@ -97,6 +97,17 @@ def test_check_notification_email_formats_default_Text_override_HTML(client, liv | ||||
|     set_original_response() | ||||
|     global smtp_test_server | ||||
|     notification_url = f'mailto://changedetection@{smtp_test_server}:11025/?to=fff@home.com' | ||||
|     notification_body = f"""<!DOCTYPE html> | ||||
| <html lang="en"> | ||||
| <head> | ||||
|     <title>My Webpage</title> | ||||
| </head> | ||||
| <body> | ||||
|     <h1>Test</h1> | ||||
|     {default_notification_body} | ||||
| </body> | ||||
| </html> | ||||
| """ | ||||
|  | ||||
|     ##################### | ||||
|     # Set this up for when we remove the notification from the watch, it should fallback with these details | ||||
| @@ -104,7 +115,7 @@ def test_check_notification_email_formats_default_Text_override_HTML(client, liv | ||||
|         url_for("settings_page"), | ||||
|         data={"application-notification_urls": notification_url, | ||||
|               "application-notification_title": "fallback-title " + default_notification_title, | ||||
|               "application-notification_body": default_notification_body, | ||||
|               "application-notification_body": notification_body, | ||||
|               "application-notification_format": 'Text', | ||||
|               "requests-time_between_check-minutes": 180, | ||||
|               'application-fetch_backend': "html_requests"}, | ||||
| @@ -161,5 +172,10 @@ def test_check_notification_email_formats_default_Text_override_HTML(client, liv | ||||
|     assert 'Content-Type: text/html' in msg | ||||
|     assert '(removed) So let\'s see what happens.<br>' in msg  # the html part | ||||
|  | ||||
|     # https://github.com/dgtlmoon/changedetection.io/issues/2103 | ||||
|     assert '<h1>Test</h1>' in msg | ||||
|     assert '<' not in msg | ||||
|     assert 'Content-Type: text/html' in msg | ||||
|  | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|   | ||||
							
								
								
									
										
											BIN
										
									
								
								changedetectionio/tests/test2.pdf
									
									
									
									
									
										Normal file
									
								
							
							
						
						| @@ -96,7 +96,9 @@ def test_api_simple(client, live_server): | ||||
|     ) | ||||
|     assert watch_uuid in res.json.keys() | ||||
|     before_recheck_info = res.json[watch_uuid] | ||||
|  | ||||
|     assert before_recheck_info['last_checked'] != 0 | ||||
|  | ||||
|     #705 `last_changed` should be zero on the first check | ||||
|     assert before_recheck_info['last_changed'] == 0 | ||||
|     assert before_recheck_info['title'] == 'My test URL' | ||||
| @@ -157,6 +159,19 @@ def test_api_simple(client, live_server): | ||||
|     # @todo how to handle None/default global values? | ||||
|     assert watch['history_n'] == 2, "Found replacement history section, which is in its own API" | ||||
|  | ||||
|     assert watch.get('viewed') == False | ||||
|     # Loading the most recent snapshot should force viewed to become true | ||||
|     client.get(url_for("diff_history_page", uuid="first"), follow_redirects=True) | ||||
|  | ||||
|     time.sleep(3) | ||||
|     # Fetch the whole watch again, viewed should be true | ||||
|     res = client.get( | ||||
|         url_for("watch", uuid=watch_uuid), | ||||
|         headers={'x-api-key': api_key} | ||||
|     ) | ||||
|     watch = res.json | ||||
|     assert watch.get('viewed') == True | ||||
|  | ||||
|     # basic systeminfo check | ||||
|     res = client.get( | ||||
|         url_for("systeminfo"), | ||||
| @@ -343,3 +358,25 @@ def test_api_watch_PUT_update(client, live_server): | ||||
|     # Cleanup everything | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|  | ||||
|  | ||||
| def test_api_import(client, live_server): | ||||
|     api_key = extract_api_key_from_UI(client) | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("import") + "?tag=import-test", | ||||
|         data='https://website1.com\r\nhttps://website2.com', | ||||
|         headers={'x-api-key': api_key}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert res.status_code == 200 | ||||
|     assert len(res.json) == 2 | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b"https://website1.com" in res.data | ||||
|     assert b"https://website2.com" in res.data | ||||
|  | ||||
|     # Should see the new tag in the tag/groups list | ||||
|     res = client.get(url_for('tags.tags_overview_page')) | ||||
|     assert b'import-test' in res.data | ||||
|      | ||||
| @@ -29,7 +29,7 @@ def test_check_basic_change_detection_functionality(client, live_server): | ||||
|  | ||||
|     assert b"1 Imported" in res.data | ||||
|  | ||||
|     time.sleep(sleep_time_for_fetch_thread) | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # Do this a few times.. ensures we dont accidently set the status | ||||
|     for n in range(3): | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| #!/usr/bin/python3 | ||||
|  | ||||
| from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks | ||||
| from .util import set_original_response, live_server_setup, wait_for_all_checks | ||||
| from flask import url_for | ||||
| from urllib.request import urlopen | ||||
| import io | ||||
| from zipfile import ZipFile | ||||
| import re | ||||
| import time | ||||
| @@ -37,15 +37,10 @@ def test_backup(client, live_server): | ||||
|     # Should be PK/ZIP stream | ||||
|     assert res.data.count(b'PK') >= 2 | ||||
|  | ||||
|     # ZipFile from buffer seems non-obvious, just save it instead | ||||
|     with open("download.zip", 'wb') as f: | ||||
|         f.write(res.data) | ||||
|  | ||||
|     zip = ZipFile('download.zip') | ||||
|     l = zip.namelist() | ||||
|     backup = ZipFile(io.BytesIO(res.data)) | ||||
|     l = backup.namelist() | ||||
|     uuid4hex = re.compile('^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}.*txt', re.I) | ||||
|     newlist = list(filter(uuid4hex.match, l))  # Read Note below | ||||
|  | ||||
|     # Should be two txt files in the archive (history and the snapshot) | ||||
|     assert len(newlist) == 2 | ||||
|  | ||||
|   | ||||
| @@ -3,7 +3,7 @@ | ||||
| import time | ||||
|  | ||||
| from flask import url_for | ||||
| from . util import live_server_setup | ||||
| from .util import live_server_setup, wait_for_all_checks | ||||
|  | ||||
| from ..html_tools import * | ||||
|  | ||||
| @@ -30,7 +30,7 @@ def _runner_test_http_errors(client, live_server, http_code, expected_text): | ||||
|     assert b"1 Imported" in res.data | ||||
|  | ||||
|     # Give the thread time to pick it up | ||||
|     time.sleep(2) | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     res = client.get(url_for("index")) | ||||
|     # no change | ||||
| @@ -57,7 +57,7 @@ def _runner_test_http_errors(client, live_server, http_code, expected_text): | ||||
| def test_http_error_handler(client, live_server): | ||||
|     _runner_test_http_errors(client, live_server, 403, 'Access denied') | ||||
|     _runner_test_http_errors(client, live_server, 404, 'Page not found') | ||||
|     _runner_test_http_errors(client, live_server, 500, '(Internal server Error) received') | ||||
|     _runner_test_http_errors(client, live_server, 500, '(Internal server error) received') | ||||
|     _runner_test_http_errors(client, live_server, 400, 'Error - Request returned a HTTP error code 400') | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
| @@ -76,7 +76,7 @@ def test_DNS_errors(client, live_server): | ||||
|     assert b"1 Imported" in res.data | ||||
|  | ||||
|     # Give the thread time to pick it up | ||||
|     time.sleep(3) | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     res = client.get(url_for("index")) | ||||
|     found_name_resolution_error = b"Temporary failure in name resolution" in res.data or b"Name or service not known" in res.data | ||||
| @@ -104,7 +104,7 @@ def test_low_level_errors_clear_correctly(client, live_server): | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"1 Imported" in res.data | ||||
|     time.sleep(2) | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # We should see the DNS error | ||||
|     res = client.get(url_for("index")) | ||||
| @@ -121,7 +121,7 @@ def test_low_level_errors_clear_correctly(client, live_server): | ||||
|     ) | ||||
|  | ||||
|     # Now the error should be gone | ||||
|     time.sleep(2) | ||||
|     wait_for_all_checks(client) | ||||
|     res = client.get(url_for("index")) | ||||
|     found_name_resolution_error = b"Temporary failure in name resolution" in res.data or b"Name or service not known" in res.data | ||||
|     assert not found_name_resolution_error | ||||
|   | ||||
| @@ -227,9 +227,6 @@ def test_regex_error_handling(client, live_server): | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     with open('/tmp/fuck.html', 'wb') as f: | ||||
|         f.write(res.data) | ||||
|  | ||||
|     assert b'is not a valid regular expression.' in res.data | ||||
|  | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|   | ||||
| @@ -321,3 +321,154 @@ def test_clone_tag_on_quickwatchform_add(client, live_server): | ||||
|  | ||||
|     res = client.get(url_for("tags.delete_all"), follow_redirects=True) | ||||
|     assert b'All tags deleted' in res.data | ||||
|  | ||||
| def test_order_of_filters_tag_filter_and_watch_filter(client, live_server): | ||||
|  | ||||
|     # Add a tag with some config, import a tag and it should roughly work | ||||
|     res = client.post( | ||||
|         url_for("tags.form_tag_add"), | ||||
|         data={"name": "test-tag-keep-order"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Tag added" in res.data | ||||
|     assert b"test-tag-keep-order" in res.data | ||||
|     tag_filters = [ | ||||
|             '#only-this', # duplicated filters | ||||
|             '#only-this', | ||||
|             '#only-this', | ||||
|             '#only-this', | ||||
|             ] | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("tags.form_tag_edit_submit", uuid="first"), | ||||
|         data={"name": "test-tag-keep-order", | ||||
|               "include_filters": '\n'.join(tag_filters) }, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated" in res.data | ||||
|     tag_uuid = get_UUID_for_tag_name(client, name="test-tag-keep-order") | ||||
|     res = client.get( | ||||
|         url_for("tags.form_tag_edit", uuid="first") | ||||
|     ) | ||||
|     assert b"#only-this" in res.data | ||||
|  | ||||
|  | ||||
|     d = """<html> | ||||
|        <body> | ||||
|      Some initial text<br> | ||||
|      <p id="only-this">And 1 this</p> | ||||
|      <br> | ||||
|      <p id="not-this">And 2 this</p> | ||||
|      <p id="">And 3 this</p><!--/html/body/p[3]/--> | ||||
|      <p id="">And 4 this</p><!--/html/body/p[4]/--> | ||||
|      <p id="">And 5 this</p><!--/html/body/p[5]/--> | ||||
|      <p id="">And 6 this</p><!--/html/body/p[6]/--> | ||||
|      <p id="">And 7 this</p><!--/html/body/p[7]/--> | ||||
|      <p id="">And 8 this</p><!--/html/body/p[8]/--> | ||||
|      <p id="">And 9 this</p><!--/html/body/p[9]/--> | ||||
|      <p id="">And 10 this</p><!--/html/body/p[10]/--> | ||||
|      <p id="">And 11 this</p><!--/html/body/p[11]/--> | ||||
|      <p id="">And 12 this</p><!--/html/body/p[12]/--> | ||||
|      <p id="">And 13 this</p><!--/html/body/p[13]/--> | ||||
|      <p id="">And 14 this</p><!--/html/body/p[14]/--> | ||||
|      <p id="not-this">And 15 this</p><!--/html/body/p[15]/--> | ||||
|      </body> | ||||
|      </html> | ||||
|     """ | ||||
|  | ||||
|     with open("test-datastore/endpoint-content.txt", "w") as f: | ||||
|         f.write(d) | ||||
|  | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
|     res = client.post( | ||||
|         url_for("import_page"), | ||||
|         data={"urls": test_url}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"1 Imported" in res.data | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     filters = [ | ||||
|             '/html/body/p[3]', | ||||
|             '/html/body/p[4]', | ||||
|             '/html/body/p[5]', | ||||
|             '/html/body/p[6]', | ||||
|             '/html/body/p[7]', | ||||
|             '/html/body/p[8]', | ||||
|             '/html/body/p[9]', | ||||
|             '/html/body/p[10]', | ||||
|             '/html/body/p[11]', | ||||
|             '/html/body/p[12]', | ||||
|             '/html/body/p[13]', # duplicated tags | ||||
|             '/html/body/p[13]', | ||||
|             '/html/body/p[13]', | ||||
|             '/html/body/p[13]', | ||||
|             '/html/body/p[13]', | ||||
|             '/html/body/p[14]', | ||||
|             ] | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid="first"), | ||||
|         data={"include_filters": '\n'.join(filters), | ||||
|             "url": test_url, | ||||
|             "tags": "test-tag-keep-order", | ||||
|             "headers": "", | ||||
|             'fetch_backend': "html_requests"}, | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b"Updated watch." in res.data | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     res = client.get( | ||||
|         url_for("preview_page", uuid="first"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b"And 1 this" in res.data  # test-tag-keep-order | ||||
|  | ||||
|     a_tag_filter_check = b'And 1 this' #'#only-this' of tag_filters | ||||
|     # check there is no duplication of tag_filters | ||||
|     assert res.data.count(a_tag_filter_check) == 1, f"duplicated filters didn't removed {res.data.count(a_tag_filter_check)} of {a_tag_filter_check} in {res.data=}" | ||||
|  | ||||
|     a_filter_check = b"And 13 this" # '/html/body/p[13]' | ||||
|     # check there is no duplication of filters | ||||
|     assert res.data.count(a_filter_check) == 1, f"duplicated filters didn't removed. {res.data.count(a_filter_check)} of {a_filter_check} in {res.data=}" | ||||
|  | ||||
|     a_filter_check_not_include = b"And 2 this" # '/html/body/p[2]' | ||||
|     assert a_filter_check_not_include not in res.data | ||||
|  | ||||
|     checklist = [ | ||||
|             b"And 3 this", | ||||
|             b"And 4 this", | ||||
|             b"And 5 this", | ||||
|             b"And 6 this", | ||||
|             b"And 7 this", | ||||
|             b"And 8 this", | ||||
|             b"And 9 this", | ||||
|             b"And 10 this", | ||||
|             b"And 11 this", | ||||
|             b"And 12 this", | ||||
|             b"And 13 this", | ||||
|             b"And 14 this", | ||||
|             b"And 1 this", # result of filter from tag. | ||||
|             ] | ||||
|     # check whether everything a user requested is there | ||||
|     for test in checklist: | ||||
|         assert test in res.data | ||||
|  | ||||
|     # check whether everything a user requested is in order of filters. | ||||
|     n = 0 | ||||
|     for test in checklist: | ||||
|         t_index = res.data[n:].find(test) | ||||
|         # if the text is not searched, return -1. | ||||
|         assert t_index >= 0, f"""failed because {test=} not in {res.data[n:]=} | ||||
| ##################### | ||||
| Looks like some feature changed the order of result of filters. | ||||
| ##################### | ||||
| the {test} appeared before. {test in res.data[:n]=} | ||||
| {res.data[:n]=} | ||||
|         """ | ||||
|         n += t_index + len(test) | ||||
|  | ||||
|     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|     assert b'Deleted' in res.data | ||||
|   | ||||
| @@ -456,7 +456,7 @@ def test_ignore_json_order(client, live_server): | ||||
|  | ||||
| def test_correct_header_detect(client, live_server): | ||||
|     # Like in https://github.com/dgtlmoon/changedetection.io/pull/1593 | ||||
|     # Specify extra html that JSON is sometimes wrapped in - when using Browserless/Puppeteer etc | ||||
|     # Specify extra html that JSON is sometimes wrapped in - when using SockpuppetBrowser / Puppeteer / Playwrightetc | ||||
|     with open("test-datastore/endpoint-content.txt", "w") as f: | ||||
|         f.write('<html><body>{"hello" : 123, "world": 123}') | ||||
|  | ||||
|   | ||||
| @@ -281,7 +281,8 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server): | ||||
|  | ||||
|     # CUSTOM JSON BODY CHECK for POST:// | ||||
|     set_original_response() | ||||
|     test_notification_url = url_for('test_notification_endpoint', _external=True).replace('http://', 'post://')+"?xxx={{ watch_url }}" | ||||
|     # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#header-manipulation | ||||
|     test_notification_url = url_for('test_notification_endpoint', _external=True).replace('http://', 'post://')+"?xxx={{ watch_url }}&+custom-header=123" | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("settings_page"), | ||||
| @@ -297,10 +298,7 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server): | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|     assert b'Settings updated' in res.data | ||||
|     client.get( | ||||
|         url_for("form_delete", uuid="all"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     # Add a watch and trigger a HTTP POST | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
|     res = client.post( | ||||
| @@ -315,7 +313,9 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server): | ||||
|     set_modified_response() | ||||
|  | ||||
|     client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     time.sleep(2) | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     time.sleep(2) # plus extra delay for notifications to fire | ||||
|  | ||||
|     with open("test-datastore/notification.txt", 'r') as f: | ||||
|         x = f.read() | ||||
| @@ -328,6 +328,13 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server): | ||||
|     with open("test-datastore/notification-url.txt", 'r') as f: | ||||
|         notification_url = f.read() | ||||
|         assert 'xxx=http' in notification_url | ||||
|         # apprise style headers should be stripped | ||||
|         assert 'custom-header' not in notification_url | ||||
|  | ||||
|     with open("test-datastore/notification-headers.txt", 'r') as f: | ||||
|         notification_headers = f.read() | ||||
|         assert 'custom-header: 123' in notification_headers.lower() | ||||
|  | ||||
|  | ||||
|     # Should always be automatically detected as JSON content type even when we set it as 'Text' (default) | ||||
|     assert os.path.isfile("test-datastore/notification-content-type.txt") | ||||
| @@ -335,3 +342,8 @@ def test_notification_custom_endpoint_and_jinja2(client, live_server): | ||||
|         assert 'application/json' in f.read() | ||||
|  | ||||
|     os.unlink("test-datastore/notification-url.txt") | ||||
|  | ||||
|     client.get( | ||||
|         url_for("form_delete", uuid="all"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|   | ||||
| @@ -1,8 +1,7 @@ | ||||
| import os | ||||
| import time | ||||
| import re | ||||
| from flask import url_for | ||||
| from . util import set_original_response, set_modified_response, live_server_setup | ||||
| from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks | ||||
| import logging | ||||
|  | ||||
| def test_check_notification_error_handling(client, live_server): | ||||
| @@ -11,7 +10,7 @@ def test_check_notification_error_handling(client, live_server): | ||||
|     set_original_response() | ||||
|  | ||||
|     # Give the endpoint time to spin up | ||||
|     time.sleep(2) | ||||
|     time.sleep(1) | ||||
|  | ||||
|     # Set a URL and fetch it, then set a notification URL which is going to give errors | ||||
|     test_url = url_for('test_endpoint', _external=True) | ||||
| @@ -22,12 +21,16 @@ def test_check_notification_error_handling(client, live_server): | ||||
|     ) | ||||
|     assert b"Watch added" in res.data | ||||
|  | ||||
|     time.sleep(2) | ||||
|     wait_for_all_checks(client) | ||||
|     set_modified_response() | ||||
|  | ||||
|     working_notification_url = url_for('test_notification_endpoint', _external=True).replace('http', 'json') | ||||
|     broken_notification_url = "jsons://broken-url-xxxxxxxx123/test" | ||||
|  | ||||
|     res = client.post( | ||||
|         url_for("edit_page", uuid="first"), | ||||
|         data={"notification_urls": "jsons://broken-url-xxxxxxxx123/test", | ||||
|         # A URL with errors should not block the one that is working | ||||
|         data={"notification_urls": f"{broken_notification_url}\r\n{working_notification_url}", | ||||
|               "notification_title": "xxx", | ||||
|               "notification_body": "xxxxx", | ||||
|               "notification_format": "Text", | ||||
| @@ -63,4 +66,10 @@ def test_check_notification_error_handling(client, live_server): | ||||
|     found_name_resolution_error = b"Temporary failure in name resolution" in res.data or b"Name or service not known" in res.data | ||||
|     assert found_name_resolution_error | ||||
|  | ||||
|     # And the working one, which is after the 'broken' one should still have fired | ||||
|     with open("test-datastore/notification.txt", "r") as f: | ||||
|         notification_submission = f.read() | ||||
|     os.unlink("test-datastore/notification.txt") | ||||
|     assert 'xxxxx' in notification_submission | ||||
|  | ||||
|     client.get(url_for("form_delete", uuid="all"), follow_redirects=True) | ||||
|   | ||||
| @@ -2,9 +2,8 @@ | ||||
|  | ||||
| import time | ||||
| from flask import url_for | ||||
| from .util import set_original_response, set_modified_response, live_server_setup | ||||
| from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks | ||||
|  | ||||
| sleep_time_for_fetch_thread = 3 | ||||
|  | ||||
| # `subtractive_selectors` should still work in `source:` type requests | ||||
| def test_fetch_pdf(client, live_server): | ||||
| @@ -22,19 +21,56 @@ def test_fetch_pdf(client, live_server): | ||||
|  | ||||
|     assert b"1 Imported" in res.data | ||||
|  | ||||
|     time.sleep(sleep_time_for_fetch_thread) | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     res = client.get( | ||||
|         url_for("preview_page", uuid="first"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert b'PDF-1.5' not in res.data | ||||
|     # PDF header should not be there (it was converted to text) | ||||
|     assert b'PDF' not in res.data[:10] | ||||
|     assert b'hello world' in res.data | ||||
|  | ||||
|     # So we know if the file changes in other ways | ||||
|     import hashlib | ||||
|     md5 = hashlib.md5(open("test-datastore/endpoint-test.pdf", 'rb').read()).hexdigest().upper() | ||||
|     original_md5 = hashlib.md5(open("test-datastore/endpoint-test.pdf", 'rb').read()).hexdigest().upper() | ||||
|     # We should have one | ||||
|     assert len(md5) >0 | ||||
|     assert len(original_md5) >0 | ||||
|     # And it's going to be in the document | ||||
|     assert b'Document checksum - '+bytes(str(md5).encode('utf-8')) in res.data | ||||
|     assert b'Document checksum - '+bytes(str(original_md5).encode('utf-8')) in res.data | ||||
|  | ||||
|  | ||||
|     shutil.copy("tests/test2.pdf", "test-datastore/endpoint-test.pdf") | ||||
|     changed_md5 = hashlib.md5(open("test-datastore/endpoint-test.pdf", 'rb').read()).hexdigest().upper() | ||||
|     res = client.get(url_for("form_watch_checknow"), follow_redirects=True) | ||||
|     assert b'1 watches queued for rechecking.' in res.data | ||||
|  | ||||
|     wait_for_all_checks(client) | ||||
|  | ||||
|     # Now something should be ready, indicated by having a 'unviewed' class | ||||
|     res = client.get(url_for("index")) | ||||
|     assert b'unviewed' in res.data | ||||
|  | ||||
|     # The original checksum should be not be here anymore (cdio adds it to the bottom of the text) | ||||
|  | ||||
|     res = client.get( | ||||
|         url_for("preview_page", uuid="first"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert original_md5.encode('utf-8') not in res.data | ||||
|     assert changed_md5.encode('utf-8') in res.data | ||||
|  | ||||
|  | ||||
|     res = client.get( | ||||
|         url_for("diff_history_page", uuid="first"), | ||||
|         follow_redirects=True | ||||
|     ) | ||||
|  | ||||
|     assert original_md5.encode('utf-8') in res.data | ||||
|     assert changed_md5.encode('utf-8') in res.data | ||||
|  | ||||
|     assert b'here is a change' in res.data | ||||
|      | ||||