Mirror of https://github.com/dgtlmoon/changedetection.io.git
Synced 2025-11-04 08:34:57 +00:00
Compare commits

2 Commits

upgrade-pl...highlight-
| Author | SHA1 | Date |
|---|---|---|
|  | 6ff424de24 |  |
|  | 6ad0eb736d |  |

.github/workflows/containers.yml (vendored, 16 lines changed)
```diff
@@ -96,9 +96,8 @@ jobs:
           tags: |
             ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:dev,ghcr.io/${{ github.repository }}:dev
           platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
+          cache-from: type=local,src=/tmp/.buildx-cache
+          cache-to: type=local,dest=/tmp/.buildx-cache
 # Looks like this was disabled
 #          provenance: false
 
@@ -117,11 +116,18 @@ jobs:
             ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest
             ghcr.io/dgtlmoon/changedetection.io:latest
           platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7,linux/arm/v8
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
+          cache-from: type=local,src=/tmp/.buildx-cache
+          cache-to: type=local,dest=/tmp/.buildx-cache
 # Looks like this was disabled
 #          provenance: false
 
       - name: Image digest
         run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}
 
+      - name: Cache Docker layers
+        uses: actions/cache@v3
+        with:
+          path: /tmp/.buildx-cache
+          key: ${{ runner.os }}-buildx-${{ github.sha }}
+          restore-keys: |
+            ${{ runner.os }}-buildx-
```
.github/workflows/test-only.yml (vendored, 14 lines changed)
```diff
@@ -29,11 +29,8 @@ jobs:
           docker network create changedet-network
 
           # Selenium+browserless
-          docker run --network changedet-network -d --hostname selenium  -p 4444:4444 --rm --shm-size="2g"  selenium/standalone-chrome:4
-          docker run --network changedet-network -d --name browserless --hostname browserless -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm  -p 3000:3000  --shm-size="2g"  browserless/chrome:1.60-chrome-stable
-
-          # For accessing custom browser tests
-          docker run --network changedet-network -d --name browserless-custom-url --hostname browserless-custom-url -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm --shm-size="2g"  browserless/chrome:1.60-chrome-stable
+          docker run --network changedet-network -d --hostname selenium  -p 4444:4444 --rm --shm-size="2g"  selenium/standalone-chrome-debug:3.141.59
+          docker run --network changedet-network -d --hostname browserless -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm  -p 3000:3000  --shm-size="2g"  browserless/chrome:1.53-chrome-stable
 
       - name: Build changedetection.io container for testing
         run: |
@@ -51,7 +48,6 @@ jobs:
         run: |
           # Unit tests
           docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_notification_diff'
-          docker run test-changedetectionio  bash -c 'python3 -m unittest changedetectionio.tests.unit.test_watch_model'
 
           # All tests
           docker run --network changedet-network  test-changedetectionio  bash -c 'cd changedetectionio && ./run_basic_tests.sh'
@@ -90,12 +86,6 @@ jobs:
           # And again with PLAYWRIGHT_DRIVER_URL=..
           cd ..
 
-      - name: Test custom browser URL
-        run: |
-          cd changedetectionio
-          ./run_custom_browser_url_tests.sh
-          cd ..
-
       - name: Test changedetection.io container starts+runs basically without error
         run: |
           docker run -p 5556:5000 -d test-changedetectionio
```
Dockerfile

```diff
@@ -1,5 +1,5 @@
 # pip dependencies install stage
-FROM python:3.11-slim-bookworm as builder
+FROM python:3.11-slim-bullseye as builder
 
 # See `cryptography` pin comment in requirements.txt
 ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1
@@ -25,13 +25,14 @@ RUN pip install --target=/dependencies -r /requirements.txt
 # Playwright is an alternative to Selenium
 # Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing
 # https://github.com/dgtlmoon/changedetection.io/pull/1067 also musl/alpine (not supported)
-RUN pip install --target=/dependencies playwright~=1.40 \
+RUN pip install --target=/dependencies playwright~=1.27.1 \
     || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."
 
 # Final image stage
-FROM python:3.11-slim-bookworm
+FROM python:3.11-slim-bullseye
 
 RUN apt-get update && apt-get install -y --no-install-recommends \
+    libssl1.1 \
     libxslt1.1 \
     # For pdftohtml
     poppler-utils \
```
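The `|| echo "WARN: ..."` fallback on the Playwright install means the package may legitimately be missing at runtime (arm/v6, arm/v7 and musl builds). A minimal sketch of the guarded-import pattern that fallback implies; the helper name here is illustrative, not the project's API:

```python
# Probe for Playwright instead of importing it unconditionally,
# mirroring the Dockerfile's tolerated install failure.
def playwright_available() -> bool:
    try:
        import playwright.sync_api  # noqa: F401 - probe only
        return True
    except ImportError:
        return False

if not playwright_available():
    print("WARN: Playwright missing - the Playwright fetcher option stays disabled.")
```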
MANIFEST.in

```diff
@@ -16,4 +16,3 @@ global-exclude venv
 
 global-exclude test-datastore
-global-exclude changedetection.io*dist-info
 global-exclude changedetectionio/tests/proxy_socks5/test-datastore
```
README.md (11 lines changed)
```diff
@@ -232,13 +232,6 @@ See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configura
 
 Raspberry Pi and linux/arm/v6 linux/arm/v7 arm64 devices are supported! See the wiki for [details](https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver)
 
-## Import support
-
-Easily [import your list of websites to watch for changes in Excel .xslx file format](https://changedetection.io/tutorial/how-import-your-website-change-detection-lists-excel), or paste in lists of website URLs as plaintext.
-
-Excel import is recommended - that way you can better organise tags/groups of websites and other features.
-
-
 ## API Support
 
 Supports managing the website watch list [via our API](https://changedetection.io/docs/api_v1/index.html)
@@ -268,7 +261,3 @@ I offer commercial support, this software is depended on by network security, ae
 [license-shield]: https://img.shields.io/github/license/dgtlmoon/changedetection.io.svg?style=for-the-badge
 [release-link]: https://github.com/dgtlmoon/changedetection.io/releases
 [docker-link]: https://hub.docker.com/r/dgtlmoon/changedetection.io
-
-## Third-party licenses
-
-changedetectionio.html_tools.elementpath_tostring: Copyright (c), 2018-2021, SISSA (Scuola Internazionale Superiore di Studi Avanzati), Licensed under [MIT license](https://github.com/sissaschool/elementpath/blob/master/LICENSE)
```
changedetectionio/__init__.py

```diff
@@ -38,7 +38,7 @@ from flask_paginate import Pagination, get_page_parameter
 from changedetectionio import html_tools
 from changedetectionio.api import api_v1
 
-__version__ = '0.45.7.3'
+__version__ = '0.45.3'
 
 from changedetectionio.store import BASE_URL_NOT_SET_TEXT
 
```
```diff
@@ -105,10 +105,6 @@ def get_darkmode_state():
     css_dark_mode = request.cookies.get('css_dark_mode', 'false')
     return 'true' if css_dark_mode and strtobool(css_dark_mode) else 'false'
 
-@app.template_global()
-def get_css_version():
-    return __version__
-
 # We use the whole watch object from the store/JSON so we can see if there's some related status in terms of a thread
 # running or something similar.
 @app.template_filter('format_last_checked_time')
```
```diff
@@ -420,18 +416,11 @@ def changedetection_app(config=None, datastore_o=None):
 
         # Sort by last_changed and add the uuid which is usually the key..
         sorted_watches = []
-        with_errors = request.args.get('with_errors') == "1"
-        errored_count = 0
         search_q = request.args.get('q').strip().lower() if request.args.get('q') else False
         for uuid, watch in datastore.data['watching'].items():
-            if with_errors and not watch.get('last_error'):
-                continue
-
             if limit_tag and not limit_tag in watch['tags']:
                     continue
-            if watch.get('last_error'):
-                errored_count += 1
-
 
             if search_q:
                 if (watch.get('title') and search_q in watch.get('title').lower()) or search_q in watch.get('url', '').lower():
                     sorted_watches.append(watch)
```
```diff
@@ -453,7 +442,6 @@ def changedetection_app(config=None, datastore_o=None):
                                  active_tag=limit_tag,
                                  app_rss_token=datastore.data['settings']['application']['rss_access_token'],
                                  datastore=datastore,
-                                 errored_count=errored_count,
                                  form=form,
                                  guid=datastore.data['app_guid'],
                                  has_proxies=datastore.proxy_list,
```
```diff
@@ -614,8 +602,6 @@ def changedetection_app(config=None, datastore_o=None):
         # For the form widget tag uuid lookup
         form.tags.datastore = datastore # in _value
 
-        for p in datastore.extra_browsers:
-            form.fetch_backend.choices.append(p)
 
         form.fetch_backend.choices.append(("system", 'System settings default'))
 
```
```diff
@@ -636,6 +622,7 @@ def changedetection_app(config=None, datastore_o=None):
 
             if request.args.get('unpause_on_save'):
                 extra_update_obj['paused'] = False
 
+            # Re #110, if they submit the same as the default value, set it to None, so we continue to follow the default
             # Assume we use the default value, unless something relevant is different, then use the form value
             # values could be None, 0 etc.
```
```diff
@@ -716,16 +703,16 @@ def changedetection_app(config=None, datastore_o=None):
             system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'
 
             is_html_webdriver = False
-            if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver' or watch.get('fetch_backend', '').startswith('extra_browser_'):
+            if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver':
                 is_html_webdriver = True
 
             # Only works reliably with Playwright
             visualselector_enabled = os.getenv('PLAYWRIGHT_DRIVER_URL', False) and is_html_webdriver
 
             output = render_template("edit.html",
                                      available_processors=processors.available_processors(),
                                      browser_steps_config=browser_step_ui_config,
                                      emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
-                                     extra_title=f" - Edit - {watch.label}",
                                      form=form,
                                      has_default_notification_urls=True if len(datastore.data['settings']['application']['notification_urls']) else False,
                                      has_empty_checktime=using_default_check_time,
```
```diff
@@ -821,16 +808,6 @@ def changedetection_app(config=None, datastore_o=None):
 
         return output
 
-    @app.route("/settings/reset-api-key", methods=['GET'])
-    @login_optionally_required
-    def settings_reset_api_key():
-        import secrets
-        secret = secrets.token_hex(16)
-        datastore.data['settings']['application']['api_access_token'] = secret
-        datastore.needs_write_urgent = True
-        flash("API Key was regenerated.")
-        return redirect(url_for('settings_page')+'#api')
-
     @app.route("/import", methods=['GET', "POST"])
     @login_optionally_required
     def import_page():
```
```diff
@@ -838,7 +815,6 @@ def changedetection_app(config=None, datastore_o=None):
         from . import forms
 
         if request.method == 'POST':
-
             from .importer import import_url_list, import_distill_io_json
 
             # URL List import
```
```diff
@@ -862,32 +838,11 @@ def changedetection_app(config=None, datastore_o=None):
                 for uuid in d_importer.new_uuids:
                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
 
-            # XLSX importer
-            if request.files and request.files.get('xlsx_file'):
-                file = request.files['xlsx_file']
-                from .importer import import_xlsx_wachete, import_xlsx_custom
-
-                if request.values.get('file_mapping') == 'wachete':
-                    w_importer = import_xlsx_wachete()
-                    w_importer.run(data=file, flash=flash, datastore=datastore)
-                else:
-                    w_importer = import_xlsx_custom()
-                    # Building mapping of col # to col # type
-                    map = {}
-                    for i in range(10):
-                        c = request.values.get(f"custom_xlsx[col_{i}]")
-                        v = request.values.get(f"custom_xlsx[col_type_{i}]")
-                        if c and v:
-                            map[int(c)] = v
-
-                    w_importer.import_profile = map
-                    w_importer.run(data=file, flash=flash, datastore=datastore)
-
-                for uuid in w_importer.new_uuids:
-                    update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': True}))
-
-        form = forms.importForm(formdata=request.form if request.method == 'POST' else None,
-#                               data=default,
-                               )
+        # Could be some remaining, or we could be on GET
+        form = forms.importForm(formdata=request.form if request.method == 'POST' else None)
         output = render_template("import.html",
                                  form=form,
                                  import_url_list_remaining="\n".join(remaining_urls),
```
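For reference, the removed XLSX branch builds a column-number to column-meaning map from the posted form fields. A standalone sketch of that mapping loop, with a plain dict standing in for Flask's `request.values` (the sample values are illustrative):

```python
# For each of ten possible columns the form may post
# custom_xlsx[col_N] (the spreadsheet column number) and
# custom_xlsx[col_type_N] (what that column means).
# Only pairs where both values exist are kept.
form_values = {  # stand-in for Flask's request.values
    "custom_xlsx[col_0]": "1", "custom_xlsx[col_type_0]": "url",
    "custom_xlsx[col_1]": "3", "custom_xlsx[col_type_1]": "title",
}

import_profile = {}
for i in range(10):
    c = form_values.get(f"custom_xlsx[col_{i}]")
    v = form_values.get(f"custom_xlsx[col_type_{i}]")
    if c and v:
        import_profile[int(c)] = v

print(import_profile)  # {1: 'url', 3: 'title'}
```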
```diff
@@ -901,10 +856,7 @@ def changedetection_app(config=None, datastore_o=None):
     def mark_all_viewed():
 
         # Save the current newest history as the most recently viewed
-        with_errors = request.args.get('with_errors') == "1"
         for watch_uuid, watch in datastore.data['watching'].items():
-            if with_errors and not watch.get('last_error'):
-                continue
             datastore.set_last_viewed(watch_uuid, int(time.time()))
 
         return redirect(url_for('index'))
```
```diff
@@ -960,36 +912,28 @@ def changedetection_app(config=None, datastore_o=None):
 
         # Read as binary and force decode as UTF-8
         # Windows may fail decode in python if we just use 'r' mode (chardet decode exception)
-        from_version = request.args.get('from_version')
-        from_version_index = -2  # second newest
-        if from_version and from_version in dates:
-            from_version_index = dates.index(from_version)
-        else:
-            from_version = dates[from_version_index]
-
         try:
-            from_version_file_contents = watch.get_history_snapshot(dates[from_version_index])
+            newest_version_file_contents = watch.get_history_snapshot(dates[-1])
         except Exception as e:
-            from_version_file_contents = f"Unable to read to-version at index {dates[from_version_index]}.\n"
+            newest_version_file_contents = "Unable to read {}.\n".format(dates[-1])
 
-        to_version = request.args.get('to_version')
-        to_version_index = -1
-        if to_version and to_version in dates:
-            to_version_index = dates.index(to_version)
-        else:
-            to_version = dates[to_version_index]
+        previous_version = request.args.get('previous_version')
+        previous_timestamp = dates[-2]
+        if previous_version:
+            previous_timestamp = previous_version
 
         try:
-            to_version_file_contents = watch.get_history_snapshot(dates[to_version_index])
+            previous_version_file_contents = watch.get_history_snapshot(previous_timestamp)
         except Exception as e:
-            to_version_file_contents = "Unable to read to-version at index{}.\n".format(dates[to_version_index])
+            previous_version_file_contents = "Unable to read {}.\n".format(previous_timestamp)
 
         screenshot_url = watch.get_screenshot()
 
         system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'
 
         is_html_webdriver = False
-        if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver' or watch.get('fetch_backend', '').startswith('extra_browser_'):
+        if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver':
            is_html_webdriver = True
 
         password_enabled_and_share_is_off = False
```
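The left-hand side resolves `?from_version` and `?to_version` against the ordered list of snapshot timestamps, falling back to the second-newest and newest entries when a value is missing or unknown. A small sketch of that resolution rule with illustrative timestamps:

```python
# dates is the ordered list of snapshot timestamps (oldest..newest);
# unknown or missing query values fall back to index -2 (second
# newest) for "from" and -1 (newest) for "to".
dates = ["1699000000", "1699100000", "1699200000"]

def resolve(requested, default_index):
    index = default_index
    if requested and requested in dates:
        index = dates.index(requested)
    return dates[index], index

from_version, from_i = resolve(None, -2)        # second newest
to_version, to_i = resolve("1699200000", -1)    # newest
print(from_version, to_version)
```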
```diff
@@ -998,24 +942,22 @@ def changedetection_app(config=None, datastore_o=None):
 
         output = render_template("diff.html",
                                  current_diff_url=watch['url'],
-                                 from_version=str(from_version),
-                                 to_version=str(to_version),
+                                 current_previous_version=str(previous_version),
                                  extra_stylesheets=extra_stylesheets,
-                                 extra_title=f" - Diff - {watch.label}",
+                                 extra_title=" - Diff - {}".format(watch['title'] if watch['title'] else watch['url']),
                                  extract_form=extract_form,
                                  is_html_webdriver=is_html_webdriver,
                                  last_error=watch['last_error'],
                                  last_error_screenshot=watch.get_error_snapshot(),
                                  last_error_text=watch.get_error_text(),
                                  left_sticky=True,
-                                 newest=to_version_file_contents,
+                                 newest=newest_version_file_contents,
                                  newest_version_timestamp=dates[-1],
                                  password_enabled_and_share_is_off=password_enabled_and_share_is_off,
-                                 from_version_file_contents=from_version_file_contents,
-                                 to_version_file_contents=to_version_file_contents,
+                                 previous=previous_version_file_contents,
                                  screenshot=screenshot_url,
                                  uuid=uuid,
-                                 versions=dates, # All except current/last
+                                 versions=dates[:-1], # All except current/last
                                  watch_a=watch
                                  )
 
```
```diff
@@ -1043,7 +985,7 @@ def changedetection_app(config=None, datastore_o=None):
 
 
         is_html_webdriver = False
-        if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver' or watch.get('fetch_backend', '').startswith('extra_browser_'):
+        if (watch.get('fetch_backend') == 'system' and system_uses_webdriver) or watch.get('fetch_backend') == 'html_webdriver':
             is_html_webdriver = True
 
         # Never requested successfully, but we detected a fetch error
```
```diff
@@ -1224,7 +1166,8 @@ def changedetection_app(config=None, datastore_o=None):
             # These files should be in our subdirectory
             try:
                 # set nocache, set content-type
-                response = make_response(send_from_directory(os.path.join(datastore_o.datastore_path, filename), "elements.json"))
+                watch_dir = datastore_o.datastore_path + "/" + filename
+                response = make_response(send_from_directory(filename="elements.json", directory=watch_dir, path=watch_dir + "/elements.json"))
                 response.headers['Content-type'] = 'application/json'
                 response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
                 response.headers['Pragma'] = 'no-cache'
```
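Both sides serve the per-watch `elements.json` with aggressive no-cache headers; the left passes the watch's directory straight to `send_from_directory`. A minimal Flask sketch of the same response shape, with an assumed datastore path and route:

```python
# Serve <datastore>/<watch-uuid>/elements.json with no-cache headers,
# as in the hunk above. datastore_path and the route are assumptions
# for the sketch.
import os
from flask import Flask, make_response, send_from_directory

app = Flask(__name__)
datastore_path = "/datastore"  # assumption

@app.route("/static/elements/<string:watch_uuid>")
def elements_json(watch_uuid):
    response = make_response(
        send_from_directory(os.path.join(datastore_path, watch_uuid), "elements.json"))
    response.headers['Content-type'] = 'application/json'
    response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
    response.headers['Pragma'] = 'no-cache'
    return response
```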
```diff
@@ -1312,8 +1255,6 @@ def changedetection_app(config=None, datastore_o=None):
         # Forced recheck will skip the 'skip if content is the same' rule (, 'reprocess_existing_data': True})))
         tag = request.args.get('tag')
         uuid = request.args.get('uuid')
-        with_errors = request.args.get('with_errors') == "1"
-
         i = 0
 
         running_uuids = []
@@ -1329,8 +1270,6 @@ def changedetection_app(config=None, datastore_o=None):
             # Items that have this current tag
             for watch_uuid, watch in datastore.data['watching'].items():
                 if tag in watch.get('tags', {}):
-                    if with_errors and not watch.get('last_error'):
-                        continue
                     if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
                         update_q.put(
                             queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False})
@@ -1341,11 +1280,8 @@ def changedetection_app(config=None, datastore_o=None):
             # No tag, no uuid, add everything.
             for watch_uuid, watch in datastore.data['watching'].items():
                 if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
-                    if with_errors and not watch.get('last_error'):
-                        continue
                     update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': watch_uuid, 'skip_when_checksum_same': False}))
                     i += 1
 
         flash("{} watches queued for rechecking.".format(i))
         return redirect(url_for('index', tag=tag))
```
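All of these recheck endpoints feed `update_q` with `PrioritizedItem` objects, so UI-triggered work can jump ahead of scheduled rechecks. A generic sketch of the pattern with the stdlib `queue.PriorityQueue`; the project's `queuedWatchMetaData.PrioritizedItem` is along these lines, with the payload excluded from ordering:

```python
# A PriorityQueue orders items by their first comparable field, so
# the payload dict is marked compare=False and only priority counts.
import queue
from dataclasses import dataclass, field
from typing import Any

@dataclass(order=True)
class PrioritizedItem:
    priority: int
    item: Any = field(compare=False)

update_q = queue.PriorityQueue()
update_q.put(PrioritizedItem(priority=5, item={'uuid': 'def-456', 'skip_when_checksum_same': True}))
update_q.put(PrioritizedItem(priority=1, item={'uuid': 'abc-123', 'skip_when_checksum_same': False}))
print(update_q.get().item['uuid'])  # 'abc-123' - lower number is served first
```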
changedetectionio/blueprint/browser_steps/__init__.py

```diff
@@ -23,10 +23,8 @@
 
-from distutils.util import strtobool
 from flask import Blueprint, request, make_response
-import logging
 import os
-import re
 
+import logging
 from changedetectionio.store import ChangeDetectionStore
 from changedetectionio import login_optionally_required
```
```diff
@@ -46,7 +44,7 @@ def construct_blueprint(datastore: ChangeDetectionStore):
 
 
         # We keep the playwright session open for many minutes
-        keepalive_seconds = int(os.getenv('BROWSERSTEPS_MINUTES_KEEPALIVE', 10)) * 60
+        seconds_keepalive = int(os.getenv('BROWSERSTEPS_MINUTES_KEEPALIVE', 10)) * 60
 
         browsersteps_start_session = {'start_time': time.time()}
 
```
```diff
@@ -58,18 +56,16 @@ def construct_blueprint(datastore: ChangeDetectionStore):
             # Start the Playwright context, which is actually a nodejs sub-process and communicates over STDIN/STDOUT pipes
             io_interface_context = io_interface_context.start()
 
-        keepalive_ms = ((keepalive_seconds + 3) * 1000)
-        base_url = os.getenv('PLAYWRIGHT_DRIVER_URL', '')
-        a = "?" if not '?' in base_url else '&'
-        base_url += a + f"timeout={keepalive_ms}"
-
+        # keep it alive for 10 seconds more than we advertise, sometimes it helps to keep it shutting down cleanly
+        keepalive = "&timeout={}".format(((seconds_keepalive + 3) * 1000))
         try:
-            browsersteps_start_session['browser'] = io_interface_context.chromium.connect_over_cdp(base_url)
+            browsersteps_start_session['browser'] = io_interface_context.chromium.connect_over_cdp(
+                os.getenv('PLAYWRIGHT_DRIVER_URL', '') + keepalive)
         except Exception as e:
             if 'ECONNREFUSED' in str(e):
                 return make_response('Unable to start the Playwright Browser session, is it running?', 401)
             else:
                 # Other errors, bad URL syntax, bad reply etc
                 return make_response(str(e), 401)
 
         proxy_id = datastore.get_preferred_proxy_for_watch(uuid=watch_uuid)
```
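The left side builds the CDP connection URL with a `timeout=` parameter, choosing `?` or `&` depending on whether `PLAYWRIGHT_DRIVER_URL` already carries a query string; the right side blindly appends `&timeout=...`, which breaks a bare URL. A sketch of the query-aware variant:

```python
# Append timeout=<ms> with '?' or '&' depending on whether the
# driver URL already has query parameters. Keep the session alive a
# little longer than advertised so it shuts down cleanly.
import os

keepalive_seconds = int(os.getenv('BROWSERSTEPS_MINUTES_KEEPALIVE', 10)) * 60
keepalive_ms = (keepalive_seconds + 3) * 1000

base_url = os.getenv('PLAYWRIGHT_DRIVER_URL', 'ws://playwright-chrome:3000')
separator = '&' if '?' in base_url else '?'
base_url += f"{separator}timeout={keepalive_ms}"
print(base_url)  # e.g. ws://playwright-chrome:3000?timeout=618000
```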
```diff
@@ -122,37 +118,12 @@ def construct_blueprint(datastore: ChangeDetectionStore):
         print("Starting connection with playwright - done")
         return {'browsersteps_session_id': browsersteps_session_id}
 
-    @login_optionally_required
-    @browser_steps_blueprint.route("/browsersteps_image", methods=['GET'])
-    def browser_steps_fetch_screenshot_image():
-        from flask import (
-            make_response,
-            request,
-            send_from_directory,
-        )
-        uuid = request.args.get('uuid')
-        step_n = int(request.args.get('step_n'))
-
-        watch = datastore.data['watching'].get(uuid)
-        filename = f"step_before-{step_n}.jpeg" if request.args.get('type', '') == 'before' else f"step_{step_n}.jpeg"
-
-        if step_n and watch and os.path.isfile(os.path.join(watch.watch_data_dir, filename)):
-            response = make_response(send_from_directory(directory=watch.watch_data_dir, path=filename))
-            response.headers['Content-type'] = 'image/jpeg'
-            response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
-            response.headers['Pragma'] = 'no-cache'
-            response.headers['Expires'] = 0
-            return response
-
-        else:
-            return make_response('Unable to fetch image, is the URL correct? does the watch exist? does the step_type-n.jpeg exist?', 401)
-
     # A request for an action was received
     @login_optionally_required
     @browser_steps_blueprint.route("/browsersteps_update", methods=['POST'])
     def browsersteps_ui_update():
         import base64
-        import playwright._impl._errors
+        import playwright._impl._api_types
         global browsersteps_sessions
         from changedetectionio.blueprint.browser_steps import browser_steps
 
```
changedetectionio/blueprint/browser_steps/browser_steps.py

```diff
@@ -77,13 +77,13 @@ class steppable_browser_interface():
     def action_goto_url(self, selector=None, value=None):
         # self.page.set_viewport_size({"width": 1280, "height": 5000})
         now = time.time()
-        response = self.page.goto(value, timeout=0, wait_until='load')
-        # Should be the same as the puppeteer_fetch.js methods, means, load with no timeout set (skip timeout)
-        #and also wait for seconds ?
-        #await page.waitForTimeout(1000);
-        #await page.waitForTimeout(extra_wait_ms);
+        response = self.page.goto(value, timeout=0, wait_until='commit')
+
+        # Wait_until = commit
+        # - `'commit'` - consider operation to be finished when network response is received and the document started loading.
+        # Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
+        # This seemed to solve nearly all 'TimeoutErrors'
         print("Time to goto URL ", time.time() - now)
         return response
 
@@ -99,8 +99,7 @@ class steppable_browser_interface():
         self.page.fill(selector, value, timeout=10 * 1000)
 
     def action_execute_js(self, selector, value):
-        response = self.page.evaluate(value)
-        return response
+        self.page.evaluate(value)
 
     def action_click_element(self, selector, value):
         print("Clicking element")
@@ -110,7 +109,7 @@ class steppable_browser_interface():
         self.page.click(selector=selector, timeout=30 * 1000, delay=randint(200, 500))
 
     def action_click_element_if_exists(self, selector, value):
-        import playwright._impl._errors as _api_types
+        import playwright._impl._api_types as _api_types
         print("Clicking element if exists")
         if not len(selector.strip()):
             return
@@ -139,13 +138,13 @@ class steppable_browser_interface():
     def action_wait_for_text(self, selector, value):
         import json
         v = json.dumps(value)
-        self.page.wait_for_function(f'document.querySelector("body").innerText.includes({v});', timeout=30000)
+        self.page.wait_for_function(f'document.querySelector("body").innerText.includes({v});', timeout=90000)
 
     def action_wait_for_text_in_element(self, selector, value):
         import json
         s = json.dumps(selector)
         v = json.dumps(value)
-        self.page.wait_for_function(f'document.querySelector({s}).innerText.includes({v});', timeout=30000)
+        self.page.wait_for_function(f'document.querySelector({s}).innerText.includes({v});', timeout=90000)
 
     # @todo - in the future make some popout interface to capture what needs to be set
     # https://playwright.dev/python/docs/api/class-keyboard
```
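Both wait actions pass the needle through `json.dumps` before splicing it into the page-side predicate, so quotes in user-supplied text cannot break the injected JavaScript; only the timeout differs between the two sides (30 s versus 90 s). A standalone sketch of the technique, assuming a working Playwright install; URL and text are illustrative:

```python
# json.dumps turns arbitrary user text into a safe JS string literal
# before it is embedded in the wait_for_function predicate.
import json
from playwright.sync_api import sync_playwright

needle = "Example Domain"  # could contain quotes; json.dumps guards them
v = json.dumps(needle)

with sync_playwright() as p:
    browser = p.chromium.launch()
    page = browser.new_page()
    page.goto("https://example.com")
    page.wait_for_function(
        f'document.querySelector("body").innerText.includes({v});',
        timeout=30000)
    browser.close()
```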
changedetectionio/blueprint/check_proxies/__init__.py (filename inferred)

```diff
@@ -40,8 +40,8 @@ def construct_blueprint(datastore: ChangeDetectionStore):
         contents = ''
         now = time.time()
         try:
-            update_handler = text_json_diff.perform_site_check(datastore=datastore, watch_uuid=uuid)
-            update_handler.call_browser()
+            update_handler = text_json_diff.perform_site_check(datastore=datastore)
+            changed_detected, update_obj, contents = update_handler.run(uuid, preferred_proxy=preferred_proxy, skip_when_checksum_same=False)
         # title, size is len contents not len xfer
         except content_fetcher.Non200ErrorCodeReceived as e:
             if e.status_code == 404:
```
changedetectionio/templates/edit.html (filename inferred)

```diff
@@ -69,12 +69,11 @@ xpath://body/div/span[contains(@class, 'example-class')]",
                                 {% endif %}
                             </ul>
                         </li>
-                        <li>XPath - Limit text to this XPath rule, simply start with a forward-slash. To specify XPath to be used explicitly or the XPath rule starts with an XPath function: Prefix with <code>xpath:</code>
+                        <li>XPath - Limit text to this XPath rule, simply start with a forward-slash,
                             <ul>
-                                <li>Example:  <code>//*[contains(@class, 'sametext')]</code> or <code>xpath:count(//*[contains(@class, 'sametext')])</code>, <a
+                                <li>Example:  <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
                                 href="http://xpather.com/" target="new">test your XPath here</a></li>
                                 <li>Example: Get all titles from an RSS feed <code>//title/text()</code></li>
-                                <li>To use XPath1.0: Prefix with <code>xpath1:</code></li>
                             </ul>
                             </li>
                     </ul>
```
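The left-hand help text adds that an `xpath:` rule may start with an XPath function such as `count(...)`, which returns a scalar rather than a node set. A sketch of that difference with lxml, an XPath 1.0 engine matching the `xpath1:` prefix mentioned in the same hunk:

```python
# A path expression returns elements; a function expression such as
# count(...) returns a scalar, which is why the help text calls it out.
from lxml import html

doc = html.fromstring(
    "<body><span class='sametext'>a</span><span class='sametext'>b</span></body>")

nodes = doc.xpath("//*[contains(@class, 'sametext')]")
print(len(nodes))                                             # 2 elements
print(doc.xpath("count(//*[contains(@class, 'sametext')])"))  # 2.0 (a float)
```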
changedetectionio/content_fetcher.py

```diff
@@ -96,7 +96,6 @@ class Fetcher():
     content = None
     error = None
     fetcher_description = "No description"
-    browser_connection_url = None
     headers = {}
     status_code = None
     webdriver_js_execute_code = None
```
```diff
@@ -160,19 +159,9 @@ class Fetcher():
         """
         return {k.lower(): v for k, v in self.headers.items()}
 
-    def browser_steps_get_valid_steps(self):
-        if self.browser_steps is not None and len(self.browser_steps):
-            valid_steps = filter(
-                lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
-                self.browser_steps)
-
-            return valid_steps
-
-        return None
-
     def iterate_browser_steps(self):
         from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface
-        from playwright._impl._errors import TimeoutError
+        from playwright._impl._api_types import TimeoutError
         from jinja2 import Environment
         jinja2_env = Environment(extensions=['jinja2_time.TimeExtension'])
 
```
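`browser_steps_get_valid_steps()` on the left wraps the same predicate the right side inlines further down: drop rows whose operation is unset, the placeholder `Choose one`, or the implicit `Goto site` first step. A standalone sketch with illustrative step rows:

```python
# Keep only step rows that represent a real, user-chosen operation.
browser_steps = [
    {'operation': 'Goto site',     'selector': '',               'optional_value': ''},
    {'operation': 'Choose one',    'selector': '',               'optional_value': ''},
    {'operation': 'Click element', 'selector': 'button#accept',  'optional_value': ''},
]

valid_steps = list(filter(
    lambda s: (s['operation'] and len(s['operation'])
               and s['operation'] != 'Choose one'
               and s['operation'] != 'Goto site'),
    browser_steps))

print([s['operation'] for s in valid_steps])  # ['Click element']
```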
```diff
@@ -181,7 +170,10 @@ class Fetcher():
         if self.browser_steps is not None and len(self.browser_steps):
             interface = steppable_browser_interface()
             interface.page = self.page
-            valid_steps = self.browser_steps_get_valid_steps()
+
+            valid_steps = filter(
+                lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
+                self.browser_steps)
 
             for step in valid_steps:
                 step_n += 1
```
```diff
@@ -252,16 +244,14 @@ class base_html_playwright(Fetcher):
 
     proxy = None
 
-    def __init__(self, proxy_override=None, browser_connection_url=None):
+    def __init__(self, proxy_override=None):
         super().__init__()
 
-        self.browser_type = os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').strip('"')
-
-        # .strip('"') is going to save someone a lot of time when they accidently wrap the env value
-        if not browser_connection_url:
-            self.browser_connection_url = os.getenv("PLAYWRIGHT_DRIVER_URL", 'ws://playwright-chrome:3000').strip('"')
-        else:
-            self.browser_connection_url = browser_connection_url
+        self.browser_type = os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').strip('"')
+        self.command_executor = os.getenv(
+            "PLAYWRIGHT_DRIVER_URL",
+            'ws://playwright-chrome:3000'
+        ).strip('"')
 
         # If any proxy settings are enabled, then we should setup the proxy object
         proxy_args = {}
```
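The left-hand constructor accepts an optional explicit connection URL and only falls back to the environment, with `.strip('"')` forgiving values accidentally wrapped in quotes. A sketch of that precedence as a free function:

```python
# An explicit argument wins; otherwise the environment variable
# (with stray surrounding quotes stripped), and finally the default.
import os

def resolve_connection_url(browser_connection_url=None):
    if browser_connection_url:
        return browser_connection_url
    # .strip('"') rescues values wrapped in quotes in an env file
    return os.getenv("PLAYWRIGHT_DRIVER_URL", 'ws://playwright-chrome:3000').strip('"')

print(resolve_connection_url())                           # env or default
print(resolve_connection_url('ws://other-browser:3000'))  # explicit wins
```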
```diff
@@ -336,8 +326,9 @@ class base_html_playwright(Fetcher):
             # Remove username/password if it exists in the URL or you will receive "ERR_NO_SUPPORTED_PROXIES" error
             # Actual authentication handled by Puppeteer/node
             o = urlparse(self.proxy.get('server'))
-            proxy_url = urllib.parse.quote(o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl())
-            browserless_function_url = f"{browserless_function_url}&--proxy-server={proxy_url}"
+            # Remove scheme, socks5:// doesnt always work and it will autodetect anyway
+            proxy_url = urllib.parse.quote(o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl().replace(f"{o.scheme}://", '', 1))
+            browserless_function_url = f"{browserless_function_url}&--proxy-server={proxy_url}&dumpio=true"
 
         try:
             amp = '&' if '?' in browserless_function_url else '?'
```
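Both sides rebuild the proxy server URL with a credential-free netloc before quoting it into `--proxy-server` (credentials there produce `ERR_NO_SUPPORTED_PROXIES`); the right side additionally drops the scheme. A sketch with an illustrative SOCKS5 URL:

```python
# Rebuild the netloc from hostname:port only, then percent-encode the
# result so it can travel inside a query string.
import urllib.parse
from urllib.parse import urlparse

server = "socks5://user:secret@proxy.example.com:1080"
o = urlparse(server)
no_creds = o._replace(netloc="{}:{}".format(o.hostname, o.port)).geturl()
print(no_creds)  # socks5://proxy.example.com:1080

# The right-hand side also drops the scheme (it is autodetected anyway)
print(urllib.parse.quote(no_creds.replace(f"{o.scheme}://", '', 1)))
# proxy.example.com%3A1080
```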
```diff
@@ -422,7 +413,11 @@ class base_html_playwright(Fetcher):
             is_binary=False):
 
         # For now, USE_EXPERIMENTAL_PUPPETEER_FETCH is not supported by watches with BrowserSteps (for now!)
-        if not self.browser_steps and os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH'):
+        has_browser_steps = self.browser_steps and list(filter(
+                lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
+                self.browser_steps))
+
+        if not has_browser_steps and os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH'):
             if strtobool(os.getenv('USE_EXPERIMENTAL_PUPPETEER_FETCH')):
                 # Temporary backup solution until we rewrite the playwright code
                 return self.run_fetch_browserless_puppeteer(
```
```diff
@@ -436,7 +431,7 @@ class base_html_playwright(Fetcher):
                     is_binary)
 
         from playwright.sync_api import sync_playwright
-        import playwright._impl._errors
+        import playwright._impl._api_types
 
         self.delete_browser_steps_screenshots()
         response = None
```
```diff
@@ -447,7 +442,7 @@ class base_html_playwright(Fetcher):
             # Seemed to cause a connection Exception even tho I can see it connect
             # self.browser = browser_type.connect(self.command_executor, timeout=timeout*1000)
             # 60,000 connection timeout only
-            browser = browser_type.connect_over_cdp(self.browser_connection_url, timeout=60000)
+            browser = browser_type.connect_over_cdp(self.command_executor, timeout=60000)
 
             # SOCKS5 with authentication is not supported (yet)
             # https://github.com/microsoft/playwright/issues/10567
```
```diff
@@ -469,27 +464,40 @@ class base_html_playwright(Fetcher):
             if len(request_headers):
                 context.set_extra_http_headers(request_headers)
 
-            # Listen for all console events and handle errors
-            self.page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))
-
-            # Re-use as much code from browser steps as possible so its the same
-            from changedetectionio.blueprint.browser_steps.browser_steps import steppable_browser_interface
-            browsersteps_interface = steppable_browser_interface()
-            browsersteps_interface.page = self.page
-
-            response = browsersteps_interface.action_goto_url(value=url)
-            self.headers = response.all_headers()
-
-            if response is None:
-                context.close()
-                browser.close()
-                print("Content Fetcher > Response object was none")
-                raise EmptyReply(url=url, status_code=None)
-
-            # Execute any browser steps
-            try:
-                extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
-                self.page.wait_for_timeout(extra_wait * 1000)
-
-                if self.webdriver_js_execute_code is not None and len(self.webdriver_js_execute_code):
-                    browsersteps_interface.action_execute_js(value=self.webdriver_js_execute_code, selector=None)
-            except playwright._impl._errors.TimeoutError as e:
+                self.page.set_default_navigation_timeout(90000)
+                self.page.set_default_timeout(90000)
+
+                # Listen for all console events and handle errors
+                self.page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))
+
+            # Goto page
+            try:
+                # Wait_until = commit
+                # - `'commit'` - consider operation to be finished when network response is received and the document started loading.
+                # Better to not use any smarts from Playwright and just wait an arbitrary number of seconds
+                # This seemed to solve nearly all 'TimeoutErrors'
+                response = self.page.goto(url, wait_until='commit')
+            except playwright._impl._api_types.Error as e:
+                # Retry once - https://github.com/browserless/chrome/issues/2485
+                # Sometimes errors related to invalid cert's and other can be random
+                print("Content Fetcher > retrying request got error - ", str(e))
+                time.sleep(1)
+                response = self.page.goto(url, wait_until='commit')
+            except Exception as e:
+                print("Content Fetcher > Other exception when page.goto", str(e))
+                context.close()
+                browser.close()
+                raise PageUnloadable(url=url, status_code=None, message=str(e))
+
+            if self.webdriver_js_execute_code is not None and len(self.webdriver_js_execute_code):
+                try:
+                    self.page.evaluate(self.webdriver_js_execute_code)
+                except playwright._impl._api_types.TimeoutError as e:
                 context.close()
                 browser.close()
                 # This can be ok, we will try to grab what we could retrieve
```
```diff
@@ -500,30 +508,28 @@ class base_html_playwright(Fetcher):
                 browser.close()
                 raise PageUnloadable(url=url, status_code=None, message=str(e))
 
+            if response is None:
+                context.close()
+                browser.close()
+                print("Content Fetcher > Response object was none")
+                raise EmptyReply(url=url, status_code=None)
+
+            # Run Browser Steps here
+            self.iterate_browser_steps()
+
+            extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
+            self.page.wait_for_timeout(extra_wait * 1000)
-            time.sleep(extra_wait)
 
             self.content = self.page.content()
             self.status_code = response.status
 
             if self.status_code != 200 and not ignore_status_codes:
 
                 screenshot=self.page.screenshot(type='jpeg', full_page=True,
                                      quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)))
 
                 raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, screenshot=screenshot)
 
             if len(self.page.content().strip()) == 0:
                 context.close()
                 browser.close()
                 print("Content Fetcher > Content was empty")
                 raise EmptyReply(url=url, status_code=response.status)
 
-            # Run Browser Steps here
-            if self.browser_steps_get_valid_steps():
-                self.iterate_browser_steps()
-
-            self.page.wait_for_timeout(extra_wait * 1000)
-            self.status_code = response.status
-            self.headers = response.all_headers()
```
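The right-hand navigation retries `page.goto()` once after a short sleep, because browserless and certificate errors can be transient (see the linked issue in the hunk above). A self-contained sketch of that retry-once shape using the public sync API; it assumes a local Playwright browser install, and the URL is illustrative:

```python
# One immediate retry after a short sleep papers over sporadic
# connection/certificate errors from the remote browser.
import time
from playwright.sync_api import sync_playwright, Error as PlaywrightError

with sync_playwright() as p:
    browser = p.chromium.launch()
    page = browser.new_page()
    try:
        response = page.goto("https://example.com", wait_until='commit')
    except PlaywrightError as e:
        print("retrying request got error -", e)
        time.sleep(1)
        response = page.goto("https://example.com", wait_until='commit')
    print(response.status)
    browser.close()
```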
```diff
@@ -535,7 +541,6 @@ class base_html_playwright(Fetcher):
                 "async () => {" + self.xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors) + "}")
             self.instock_data = self.page.evaluate("async () => {" + self.instock_data_js + "}")
 
-            self.content = self.page.content()
             # Bug 3 in Playwright screenshot handling
             # Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it
             # JPEG is better here because the screenshots can be very very large
```
```diff
@@ -550,7 +555,7 @@ class base_html_playwright(Fetcher):
             except Exception as e:
                 context.close()
                 browser.close()
-                raise ScreenshotUnavailable(url=url, status_code=response.status_code)
+                raise ScreenshotUnavailable(url=url, status_code=None)
 
             context.close()
             browser.close()
```
```diff
@@ -562,6 +567,8 @@ class base_html_webdriver(Fetcher):
     else:
         fetcher_description = "WebDriver Chrome/Javascript"
 
+    command_executor = ''
+
     # Configs for Proxy setup
     # In the ENV vars, is prefixed with "webdriver_", so it is for example "webdriver_sslProxy"
     selenium_proxy_settings_mappings = ['proxyType', 'ftpProxy', 'httpProxy', 'noProxy',
```
```diff
@@ -569,15 +576,12 @@ class base_html_webdriver(Fetcher):
                                         'socksProxy', 'socksVersion', 'socksUsername', 'socksPassword']
     proxy = None
 
-    def __init__(self, proxy_override=None, browser_connection_url=None):
+    def __init__(self, proxy_override=None):
         super().__init__()
         from selenium.webdriver.common.proxy import Proxy as SeleniumProxy
 
-        # .strip('"') is going to save someone a lot of time when they accidently wrap the env value
-        if not browser_connection_url:
-            self.browser_connection_url = os.getenv("WEBDRIVER_URL", 'http://browser-chrome:4444/wd/hub').strip('"')
-        else:
-            self.browser_connection_url = browser_connection_url
+        self.command_executor = os.getenv("WEBDRIVER_URL", 'http://browser-chrome:4444/wd/hub').strip('"')
 
         # If any proxy settings are enabled, then we should setup the proxy object
         proxy_args = {}
```
```diff
@@ -610,17 +614,14 @@ class base_html_webdriver(Fetcher):
             is_binary=False):
 
         from selenium import webdriver
-        from selenium.webdriver.chrome.options import Options as ChromeOptions
+        from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
         from selenium.common.exceptions import WebDriverException
         # request_body, request_method unused for now, until some magic in the future happens.
 
-        options = ChromeOptions()
-        if self.proxy:
-            options.proxy = self.proxy
-
         self.driver = webdriver.Remote(
-            command_executor=self.browser_connection_url,
-            options=options)
+            command_executor=self.command_executor,
+            desired_capabilities=DesiredCapabilities.CHROME,
+            proxy=self.proxy)
 
         try:
             self.driver.get(url)
```
@@ -652,11 +653,11 @@ class base_html_webdriver(Fetcher):
 | 
			
		||||
    # Does the connection to the webdriver work? run a test connection.
 | 
			
		||||
    def is_ready(self):
 | 
			
		||||
        from selenium import webdriver
 | 
			
		||||
        from selenium.webdriver.chrome.options import Options as ChromeOptions
 | 
			
		||||
        from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
 | 
			
		||||
 | 
			
		||||
        self.driver = webdriver.Remote(
 | 
			
		||||
            command_executor=self.command_executor,
 | 
			
		||||
            options=ChromeOptions())
 | 
			
		||||
            desired_capabilities=DesiredCapabilities.CHROME)
 | 
			
		||||
 | 
			
		||||
        # driver.quit() seems to cause better exceptions
 | 
			
		||||
        self.quit()
 | 
			
		||||
@@ -674,10 +675,8 @@ class base_html_webdriver(Fetcher):
 | 
			
		||||
class html_requests(Fetcher):
 | 
			
		||||
    fetcher_description = "Basic fast Plaintext/HTTP Client"
 | 
			
		||||
 | 
			
		||||
    def __init__(self, proxy_override=None, browser_connection_url=None):
 | 
			
		||||
        super().__init__()
 | 
			
		||||
    def __init__(self, proxy_override=None):
 | 
			
		||||
        self.proxy_override = proxy_override
 | 
			
		||||
        # browser_connection_url is none because its always 'launched locally'
 | 
			
		||||
 | 
			
		||||
    def run(self,
 | 
			
		||||
            url,
 | 
			
		||||
 
 | 
			
		||||
@@ -15,20 +15,14 @@ from wtforms import (
 | 
			
		||||
    validators,
 | 
			
		||||
    widgets
 | 
			
		||||
)
 | 
			
		||||
from flask_wtf.file import FileField, FileAllowed
 | 
			
		||||
from wtforms.fields import FieldList
 | 
			
		||||
 | 
			
		||||
from wtforms.validators import ValidationError
 | 
			
		||||
 | 
			
		||||
from validators.url import url as url_validator
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# default
 | 
			
		||||
# each select <option data-enabled="enabled-0-0"
 | 
			
		||||
from changedetectionio.blueprint.browser_steps.browser_steps import browser_step_ui_config
 | 
			
		||||
 | 
			
		||||
from changedetectionio import content_fetcher, html_tools
 | 
			
		||||
 | 
			
		||||
from changedetectionio import content_fetcher
 | 
			
		||||
from changedetectionio.notification import (
 | 
			
		||||
    valid_notification_formats,
 | 
			
		||||
)
 | 
			
		||||
@@ -46,7 +40,7 @@ valid_method = {
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
default_method = 'GET'
 | 
			
		||||
allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class StringListField(StringField):
 | 
			
		||||
    widget = widgets.TextArea()
 | 
			
		||||
@@ -168,9 +162,7 @@ class ValidateContentFetcherIsReady(object):
 | 
			
		||||
    def __call__(self, form, field):
 | 
			
		||||
        import urllib3.exceptions
 | 
			
		||||
        from changedetectionio import content_fetcher
 | 
			
		||||
        return
 | 
			
		||||
 | 
			
		||||
# AttributeError: module 'changedetectionio.content_fetcher' has no attribute 'extra_browser_unlocked<>ASDF213r123r'
 | 
			
		||||
        # Better would be a radiohandler that keeps a reference to each class
 | 
			
		||||
        if field.data is not None and field.data != 'system':
 | 
			
		||||
            klass = getattr(content_fetcher, field.data)
 | 
			
		||||
@@ -268,23 +260,19 @@ class validateURL(object):
 | 
			
		||||
        self.message = message
 | 
			
		||||
 | 
			
		||||
    def __call__(self, form, field):
 | 
			
		||||
        # This should raise a ValidationError() or not
 | 
			
		||||
        validate_url(field.data)
 | 
			
		||||
        import validators
 | 
			
		||||
        # If hosts that only contain alphanumerics are allowed ("localhost" for example)
 | 
			
		||||
        allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
 | 
			
		||||
        try:
 | 
			
		||||
            validators.url(field.data.strip(), simple_host=allow_simplehost)
 | 
			
		||||
        except validators.ValidationFailure:
 | 
			
		||||
            message = field.gettext('\'%s\' is not a valid URL.' % (field.data.strip()))
 | 
			
		||||
            raise ValidationError(message)
 | 
			
		||||
 | 
			
		||||
def validate_url(test_url):
 | 
			
		||||
    # If hosts that only contain alphanumerics are allowed ("localhost" for example)
 | 
			
		||||
    try:
 | 
			
		||||
        url_validator(test_url, simple_host=allow_simplehost)
 | 
			
		||||
    except validators.ValidationError:
 | 
			
		||||
        #@todo check for xss
 | 
			
		||||
        message = f"'{test_url}' is not a valid URL."
 | 
			
		||||
        # This should be wtforms.validators.
 | 
			
		||||
        raise ValidationError(message)
 | 
			
		||||
        from .model.Watch import is_safe_url
 | 
			
		||||
        if not is_safe_url(field.data):
 | 
			
		||||
            raise ValidationError('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX')
 | 
			
		||||
 | 
			
		||||
    from .model.Watch import is_safe_url
 | 
			
		||||
    if not is_safe_url(test_url):
 | 
			
		||||
        # This should be wtforms.validators.
 | 
			
		||||
        raise ValidationError('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX or incorrect URL format')
 | 
			
		||||
 | 
			
		||||
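
A quick usage sketch of the standalone validate_url() above (assuming it is importable from changedetectionio.forms): it returns silently for an acceptable URL and raises ValidationError otherwise.

from wtforms import ValidationError
from changedetectionio.forms import validate_url

try:
    validate_url('javascript:alert(1)')
except ValidationError as e:
    # Blocked by SAFE_PROTOCOL_REGEX / URL format checking
    print(e)
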
class ValidateListRegex(object):
    """
@@ -296,10 +284,11 @@ class ValidateListRegex(object):
    def __call__(self, form, field):

        for line in field.data:
            if re.search(html_tools.PERL_STYLE_REGEX, line, re.IGNORECASE):
            if line[0] == '/' and line[-1] == '/':
                # Because internally we don't wrap in /
                line = line.strip('/')
                try:
                    regex = html_tools.perl_style_slash_enclosed_regex_to_options(line)
                    re.compile(regex)
                    re.compile(line)
                except re.error:
                    message = field.gettext('RegEx \'%s\' is not a valid regular expression.')
                    raise ValidationError(message % (line))
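
The conversion named above turns a Perl-style /pattern/flags filter into something re.compile() accepts. This is a hypothetical re-implementation sketch for illustration, not the project's exact function.

import re

def perl_style_slash_enclosed_regex_to_options(line):
    # '/price: \d+/i' -> '(?i)price: \d+'; trailing flags are folded into an inline group
    res = re.search(r'^/(.*?)/([a-z]*)$', line, re.IGNORECASE)
    if res and res.group(2):
        return f"(?{res.group(2)}){res.group(1)}"
    return res.group(1) if res else line

print(re.compile(perl_style_slash_enclosed_regex_to_options('/price: \\d+/i')).pattern)
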
@@ -328,30 +317,11 @@ class ValidateCSSJSONXPATHInput(object):
                return

            # Does it look like XPath?
            if line.strip()[0] == '/' or line.strip().startswith('xpath:'):
                if not self.allow_xpath:
                    raise ValidationError("XPath not permitted in this field!")
                from lxml import etree, html
                import elementpath
                # xpath 2.0-3.1
                from elementpath.xpath3 import XPath3Parser
                tree = html.fromstring("<html></html>")
                line = line.replace('xpath:', '')

                try:
                    elementpath.select(tree, line.strip(), parser=XPath3Parser)
                except elementpath.ElementPathError as e:
                    message = field.gettext('\'%s\' is not a valid XPath expression. (%s)')
                    raise ValidationError(message % (line, str(e)))
                except:
                    raise ValidationError("A system-error occurred when validating your XPath expression")

            if line.strip().startswith('xpath1:'):
            if line.strip()[0] == '/':
                if not self.allow_xpath:
                    raise ValidationError("XPath not permitted in this field!")
                from lxml import etree, html
                tree = html.fromstring("<html></html>")
                line = re.sub(r'^xpath1:', '', line)

                try:
                    tree.xpath(line.strip())
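
To illustrate the two validation paths above (a standalone sketch): elementpath's XPath3Parser checks XPath 2.0-3.1 syntax against a throwaway tree, while plain tree.xpath() is the lxml XPath 1.0 check used for 'xpath1:'-prefixed filters.

import elementpath
from elementpath.xpath3 import XPath3Parser
from lxml import etree, html

tree = html.fromstring("<html></html>")

# XPath 3 syntax check (raises elementpath.ElementPathError on bad input)
elementpath.select(tree, "//item/title/text()", parser=XPath3Parser)

# XPath 1.0 syntax check (raises lxml.etree.XPathEvalError on bad input)
try:
    tree.xpath("//item[")
except etree.XPathEvalError as e:
    print("invalid:", e)
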
@@ -428,9 +398,6 @@ class importForm(Form):
    from . import processors
    processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff")
    urls = TextAreaField('URLs')
    xlsx_file = FileField('Upload .xlsx file', validators=[FileAllowed(['xlsx'], 'Must be .xlsx file!')])
    file_mapping = SelectField('File mapping', [validators.DataRequired()], choices={('wachete', 'Wachete mapping'), ('custom','Custom mapping')})


class SingleBrowserStep(Form):

@@ -517,12 +484,6 @@ class SingleExtraProxy(Form):
    proxy_url = StringField('Proxy URL', [validators.Optional()], render_kw={"placeholder": "socks5:// or regular proxy http://user:pass@...:3128", "size":50})
    # @todo do the validation here instead

class SingleExtraBrowser(Form):
    browser_name = StringField('Name', [validators.Optional()], render_kw={"placeholder": "Name"})
    browser_connection_url = StringField('Browser connection URL', [validators.Optional()], render_kw={"placeholder": "wss://brightdata... wss://oxylabs etc", "size":50})
    # @todo do the validation here instead


# datastore.data['settings']['requests']..
class globalSettingsRequestForm(Form):
    time_between_check = FormField(TimeBetweenCheckForm)
@@ -531,7 +492,6 @@ class globalSettingsRequestForm(Form):
                                  render_kw={"style": "width: 5em;"},
                                  validators=[validators.NumberRange(min=0, message="Should contain zero or more seconds")])
    extra_proxies = FieldList(FormField(SingleExtraProxy), min_entries=5)
    extra_browsers = FieldList(FormField(SingleExtraBrowser), min_entries=5)

    def validate_extra_proxies(self, extra_validators=None):
        for e in self.data['extra_proxies']:

@@ -1,12 +1,9 @@

from bs4 import BeautifulSoup
from inscriptis import get_text
from inscriptis.model.config import ParserConfig
from jsonpath_ng.ext import parse
from typing import List
from inscriptis.css_profiles import CSS_PROFILES, HtmlElement
from inscriptis.html_properties import Display
from inscriptis.model.config import ParserConfig
from xml.sax.saxutils import escape as xml_escape
import json
import re

@@ -69,96 +66,12 @@ def element_removal(selectors: List[str], html_content):
    selector = ",".join(selectors)
    return subtractive_css_selector(selector, html_content)

def elementpath_tostring(obj):
    """
    change elementpath.select results to string type
    # The MIT License (MIT), Copyright (c), 2018-2021, SISSA (Scuola Internazionale Superiore di Studi Avanzati)
    # https://github.com/sissaschool/elementpath/blob/dfcc2fd3d6011b16e02bf30459a7924f547b47d0/elementpath/xpath_tokens.py#L1038
    """

    import elementpath
    from decimal import Decimal
    import math

    if obj is None:
        return ''
    # https://elementpath.readthedocs.io/en/latest/xpath_api.html#elementpath.select
    elif isinstance(obj, elementpath.XPathNode):
        return obj.string_value
    elif isinstance(obj, bool):
        return 'true' if obj else 'false'
    elif isinstance(obj, Decimal):
        value = format(obj, 'f')
        if '.' in value:
            return value.rstrip('0').rstrip('.')
        return value

    elif isinstance(obj, float):
        if math.isnan(obj):
            return 'NaN'
        elif math.isinf(obj):
            return str(obj).upper()

        value = str(obj)
        if '.' in value:
            value = value.rstrip('0').rstrip('.')
        if '+' in value:
            value = value.replace('+', '')
        if 'e' in value:
            return value.upper()
        return value

    return str(obj)

# Return str UTF-8 of matched rules
def xpath_filter(xpath_filter, html_content, append_pretty_line_formatting=False, is_rss=False):
    from lxml import etree, html
    import elementpath
    # xpath 2.0-3.1
    from elementpath.xpath3 import XPath3Parser

    parser = etree.HTMLParser()
    if is_rss:
        # So that we can keep CDATA for cdata_in_document_to_text() to process
        parser = etree.XMLParser(strip_cdata=False)

    tree = html.fromstring(bytes(html_content, encoding='utf-8'), parser=parser)
    html_block = ""

    r = elementpath.select(tree, xpath_filter.strip(), namespaces={'re': 'http://exslt.org/regular-expressions'}, parser=XPath3Parser)
    #@note: //title/text() won't work where <title>CDATA..

    if type(r) != list:
        r = [r]

    for element in r:
        # When there's more than 1 match, then add the suffix to separate each line
        # And where the matched result doesn't include something that will cause Inscriptis to add a newline
        # (This way each 'match' reliably has a new-line in the diff)
        # Divs are converted to 4 whitespaces by inscriptis
        if append_pretty_line_formatting and len(html_block) and (not hasattr(element, 'tag') or element.tag not in ['br', 'hr', 'div', 'p']):
            html_block += TEXT_FILTER_LIST_LINE_SUFFIX

        if type(element) == str:
            html_block += element
        elif issubclass(type(element), etree._Element) or issubclass(type(element), etree._ElementTree):
            html_block += etree.tostring(element, pretty_print=True).decode('utf-8')
        else:
            html_block += elementpath_tostring(element)

    return html_block

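
A usage sketch for the xpath_filter() defined above: the XPath 3.1 support from elementpath allows expressions that plain lxml (XPath 1.0) rejects, and non-node results are stringified by elementpath_tostring(). The output shown is an assumption based on the stringification rules above.

doc = "<html><body><span>10</span><span>32</span></body></html>"
# XPath 3.1 lets us map nodes through number() and aggregate:
print(xpath_filter("sum(//span/number())", doc))  # expected: '42'
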
# Return str UTF-8 of matched rules
# 'xpath1:'
def xpath1_filter(xpath_filter, html_content, append_pretty_line_formatting=False, is_rss=False):
def xpath_filter(xpath_filter, html_content, append_pretty_line_formatting=False):
    from lxml import etree, html

    parser = None
    if is_rss:
        # So that we can keep CDATA for cdata_in_document_to_text() to process
        parser = etree.XMLParser(strip_cdata=False)

    tree = html.fromstring(bytes(html_content, encoding='utf-8'), parser=parser)
    tree = html.fromstring(bytes(html_content, encoding='utf-8'))
    html_block = ""

    r = tree.xpath(xpath_filter.strip(), namespaces={'re': 'http://exslt.org/regular-expressions'})
@@ -181,6 +94,7 @@ def xpath1_filter(xpath_filter, html_content, append_pretty_line_formatting=Fals

    return html_block


# Extract/find element
def extract_element(find='title', html_content=''):

@@ -346,15 +260,8 @@ def strip_ignore_text(content, wordlist, mode="content"):

    return "\n".encode('utf8').join(output)

def cdata_in_document_to_text(html_content: str, render_anchor_tag_content=False) -> str:
    pattern = '<!\[CDATA\[(\s*(?:.(?<!\]\]>)\s*)*)\]\]>'
    def repl(m):
        text = m.group(1)
        return xml_escape(html_to_text(html_content=text)).strip()

    return re.sub(pattern, repl, html_content)

def html_to_text(html_content: str, render_anchor_tag_content=False, is_rss=False) -> str:
def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:
    """Converts an HTML string to a string with just the text. If rendering
    anchor tag content is enabled, the anchor tag content is also
    included in the text
@@ -370,21 +277,16 @@ def html_to_text(html_content: str, render_anchor_tag_content=False, is_rss=Fals
    #  if anchor tag content flag is set to True define a config for
    #  extracting this content
    if render_anchor_tag_content:

        parser_config = ParserConfig(
            annotation_rules={"a": ["hyperlink"]},
            display_links=True
            annotation_rules={"a": ["hyperlink"]}, display_links=True
        )
    # otherwise set config to None/default

    # otherwise set config to None
    else:
        parser_config = None

    # RSS Mode - Inscriptis will treat `title` as something else.
    # Make it as a regular block display element (//item/title)
    # This is a bit of a hack - the real way is to use XSLT to convert it to HTML #1874
    if is_rss:
        html_content = re.sub(r'<title([\s>])', r'<h1\1', html_content)
        html_content = re.sub(r'</title>', r'</h1>', html_content)

    # get text and annotations via inscriptis
    text_content = get_text(html_content, config=parser_config)

    return text_content

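
A sketch of what cdata_in_document_to_text() above does to an RSS fragment; the exact result depends on xml_escape() and html_to_text(), so the shown output is approximate.

rss_fragment = "<item><description><![CDATA[<p>Hello <b>world</b></p>]]></description></item>"
# The CDATA body is converted to plain text and XML-escaped in place,
# e.g. roughly -> "<item><description>Hello world</description></item>"
print(cdata_in_document_to_text(html_content=rss_fragment))
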
@@ -1,9 +1,6 @@
from abc import ABC, abstractmethod
import time
import validators
from wtforms import ValidationError

from changedetectionio.forms import validate_url


class Importer():
@@ -15,7 +12,6 @@ class Importer():
        self.new_uuids = []
        self.good = 0
        self.remaining_data = []
        self.import_profile = None

    @abstractmethod
    def run(self,
@@ -136,167 +132,3 @@ class import_distill_io_json(Importer):
                    good += 1

        flash("{} Imported from Distill.io in {:.2f}s, {} Skipped.".format(len(self.new_uuids), time.time() - now, len(self.remaining_data)))


class import_xlsx_wachete(Importer):

    def run(self,
            data,
            flash,
            datastore,
            ):

        good = 0
        now = time.time()
        self.new_uuids = []

        from openpyxl import load_workbook

        try:
            wb = load_workbook(data)
        except Exception as e:
            # @todo correct except
            flash("Unable to read export XLSX file, something wrong with the file?", 'error')
            return

        row_id = 2
        for row in wb.active.iter_rows(min_row=row_id):
            try:
                extras = {}
                data = {}
                for cell in row:
                    if not cell.value:
                        continue
                    column_title = wb.active.cell(row=1, column=cell.column).value.strip().lower()
                    data[column_title] = cell.value

                # Forced switch to webdriver/playwright/etc
                dynamic_wachet = str(data.get('dynamic wachet', '')).strip().lower()  # Convert bool to str to cover all cases
                # libreoffice and others can have it as =FALSE() =TRUE(), or bool(true)
                if 'true' in dynamic_wachet or dynamic_wachet == '1':
                    extras['fetch_backend'] = 'html_webdriver'
                elif 'false' in dynamic_wachet or dynamic_wachet == '0':
                    extras['fetch_backend'] = 'html_requests'

                if data.get('xpath'):
                    # @todo split by || ?
                    extras['include_filters'] = [data.get('xpath')]
                if data.get('name'):
                    extras['title'] = data.get('name').strip()
                if data.get('interval (min)'):
                    minutes = int(data.get('interval (min)'))
                    hours, minutes = divmod(minutes, 60)
                    days, hours = divmod(hours, 24)
                    weeks, days = divmod(days, 7)
                    extras['time_between_check'] = {'weeks': weeks, 'days': days, 'hours': hours, 'minutes': minutes, 'seconds': 0}
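
A worked example of the divmod chain above, which normalises the spreadsheet's single 'interval (min)' value into the time_between_check structure:

minutes = 10230                          # raw 'interval (min)' cell value
hours, minutes = divmod(minutes, 60)     # 170 hours, 30 minutes
days, hours = divmod(hours, 24)          # 7 days, 2 hours
weeks, days = divmod(days, 7)            # 1 week, 0 days
print({'weeks': weeks, 'days': days, 'hours': hours, 'minutes': minutes, 'seconds': 0})
# {'weeks': 1, 'days': 0, 'hours': 2, 'minutes': 30, 'seconds': 0}
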

                # At minimum a URL is required.
                if data.get('url'):
                    try:
                        validate_url(data.get('url'))
                    except ValidationError as e:
                        print(">> import URL error", data.get('url'), str(e))
                        flash(f"Error processing row number {row_id}, URL value was incorrect, row was skipped.", 'error')
                        # Don't bother processing anything else on this row
                        continue

                    new_uuid = datastore.add_watch(url=data['url'].strip(),
                                                   extras=extras,
                                                   tag=data.get('folder'),
                                                   write_to_disk_now=False)
                    if new_uuid:
                        # Straight into the queue.
                        self.new_uuids.append(new_uuid)
                        good += 1
            except Exception as e:
                print(e)
                flash(f"Error processing row number {row_id}, check all cell data types are correct, row was skipped.", 'error')
            else:
                row_id += 1

        flash(
            "{} imported from Wachete .xlsx in {:.2f}s".format(len(self.new_uuids), time.time() - now))


class import_xlsx_custom(Importer):

    def run(self,
            data,
            flash,
            datastore,
            ):

        good = 0
        now = time.time()
        self.new_uuids = []

        from openpyxl import load_workbook

        try:
            wb = load_workbook(data)
        except Exception as e:
            # @todo correct except
            flash("Unable to read export XLSX file, something wrong with the file?", 'error')
            return

        # @todo check at least 2 rows, same in other method
        from .forms import validate_url
        row_i = 1

        try:
            for row in wb.active.iter_rows():
                url = None
                tags = None
                extras = {}

                for cell in row:
                    if not self.import_profile.get(cell.col_idx):
                        continue
                    if not cell.value:
                        continue

                    cell_map = self.import_profile.get(cell.col_idx)

                    cell_val = str(cell.value).strip()  # could be bool

                    if cell_map == 'url':
                        url = cell.value.strip()
                        try:
                            validate_url(url)
                        except ValidationError as e:
                            print(">> Import URL error", url, str(e))
                            flash(f"Error processing row number {row_i}, URL value was incorrect, row was skipped.", 'error')
                            # Don't bother processing anything else on this row
                            url = None
                            break
                    elif cell_map == 'tag':
                        tags = cell.value.strip()
                    elif cell_map == 'include_filters':
                        # @todo validate?
                        extras['include_filters'] = [cell.value.strip()]
                    elif cell_map == 'interval_minutes':
                        hours, minutes = divmod(int(cell_val), 60)
                        days, hours = divmod(hours, 24)
                        weeks, days = divmod(days, 7)
                        extras['time_between_check'] = {'weeks': weeks, 'days': days, 'hours': hours, 'minutes': minutes, 'seconds': 0}
                    else:
                        extras[cell_map] = cell_val

                # At minimum a URL is required.
                if url:
                    new_uuid = datastore.add_watch(url=url,
                                                   extras=extras,
                                                   tag=tags,
                                                   write_to_disk_now=False)
                    if new_uuid:
                        # Straight into the queue.
                        self.new_uuids.append(new_uuid)
                        good += 1
        except Exception as e:
            print(e)
            flash(f"Error processing row number {row_i}, check all cell data types are correct, row was skipped.", 'error')
        else:
            row_i += 1

        flash(
            "{} imported from custom .xlsx in {:.2f}s".format(len(self.new_uuids), time.time() - now))

@@ -16,7 +16,6 @@ class model(dict):
                },
                'requests': {
                    'extra_proxies': [], # Configurable extra proxies via the UI
                    'extra_browsers': [],  # Configurable extra browsers via the UI
                    'jitter_seconds': 0,
                    'proxy': None, # Preferred proxy connection
                    'time_between_check': {'weeks': None, 'days': None, 'hours': 3, 'minutes': None, 'seconds': None},

@@ -4,7 +4,6 @@ import os
import re
import time
import uuid
from pathlib import Path

# Allowable protocols, protects against javascript: etc
# file:// is further checked by ALLOW_FILE_URI
@@ -19,8 +18,6 @@ from changedetectionio.notification import (

base_config = {
    'body': None,
    'browser_steps': [],
    'browser_steps_last_error_step': None,
    'check_unique_lines': False,  # On change-detected, compare against all history if it's something new
    'check_count': 0,
    'date_created': None,
@@ -28,7 +25,6 @@ base_config = {
    'extract_text': [],  # Extract text by regex after filters
    'extract_title_as_title': False,
    'fetch_backend': 'system', # plaintext, playwright etc
    'fetch_time': 0.0,
    'processor': 'text_json_diff', # could be restock_diff or others from .processors
    'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
    'filter_text_added': True,
@@ -146,14 +142,8 @@ class model(dict):
                flash(message, 'error')
                return ''

        if ready_url.startswith('source:'):
            ready_url = ready_url.replace('source:', '')
        return ready_url

    @property
    def is_source_type_url(self):
        return self.get('url', '').startswith('source:')

    @property
    def get_fetch_backend(self):
        """
@@ -177,7 +167,9 @@ class model(dict):
    @property
    def label(self):
        # Used for sorting
        return self.get('title') if self.get('title') else self.get('url')
        if self['title']:
            return self['title']
        return self['url']

    @property
    def last_changed(self):
@@ -241,14 +233,6 @@ class model(dict):
        fname = os.path.join(self.watch_data_dir, "history.txt")
        return os.path.isfile(fname)

    @property
    def has_browser_steps(self):
        has_browser_steps = self.get('browser_steps') and list(filter(
                lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
                self.get('browser_steps')))

        return has_browser_steps

    # Returns the newest key, but if there's only 1 record, then it's counted as not being new, so return 0.
    @property
    def newest_history_key(self):
@@ -262,38 +246,6 @@ class model(dict):
        bump = self.history
        return self.__newest_history_key

    # Given an arbitrary timestamp, find the closest next key
    # For example, last_viewed = 1000 so it should return the next 1001 timestamp
    #
    # used for the [diff] button so it can preset a smarter from_version
    @property
    def get_next_snapshot_key_to_last_viewed(self):

        """Unfortunately for now the timestamp is stored as a string key"""
        keys = list(self.history.keys())
        if not keys:
            return None

        last_viewed = int(self.get('last_viewed'))
        prev_k = keys[0]
        sorted_keys = sorted(keys, key=lambda x: int(x))
        sorted_keys.reverse()

        # When the 'last viewed' timestamp is greater than the newest snapshot, return second last
        if last_viewed > int(sorted_keys[0]):
            return sorted_keys[1]

        for k in sorted_keys:
            if int(k) < last_viewed:
                if prev_k == sorted_keys[0]:
                    # Return the second last one so we don't recommend that the same version compares against itself
                    return sorted_keys[1]

                return prev_k
            prev_k = k

        return keys[0]
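
A standalone trace of the property above (history keys are string timestamps): for last_viewed=1000 against keys 900/1100/1500 it yields '1100', the closest snapshot newer than the last viewed one.

keys = ['900', '1100', '1500']
last_viewed = 1000

prev_k = keys[0]
sorted_keys = sorted(keys, key=lambda x: int(x), reverse=True)  # ['1500', '1100', '900']

if last_viewed > int(sorted_keys[0]):
    result = sorted_keys[1]
else:
    result = keys[0]
    for k in sorted_keys:
        if int(k) < last_viewed:
            # Avoid recommending a version compared against itself
            result = sorted_keys[1] if prev_k == sorted_keys[0] else prev_k
            break
        prev_k = k

print(result)  # '1100'
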

    def get_history_snapshot(self, timestamp):
        import brotli
        filepath = self.history[timestamp]
@@ -539,13 +491,3 @@ class model(dict):
        filepath = os.path.join(self.watch_data_dir, 'last-fetched.br')
        with open(filepath, 'wb') as f:
            f.write(brotli.compress(contents, mode=brotli.MODE_TEXT))

    @property
    def get_browsersteps_available_screenshots(self):
        "For knowing which screenshots are available to show the user in the BrowserSteps UI"
        available = []
        for f in Path(self.watch_data_dir).glob('step_before-*.jpeg'):
            step_n = re.search(r'step_before-(\d+)', f.name)
            if step_n:
                available.append(step_n.group(1))
        return available

@@ -1,122 +1,15 @@
from abc import abstractmethod
import os
import hashlib
import re
from changedetectionio import content_fetcher
from copy import deepcopy
from distutils.util import strtobool


class difference_detection_processor():

    browser_steps = None
    datastore = None
    fetcher = None
    screenshot = None
    watch = None
    xpath_data = None

    def __init__(self, *args, datastore, watch_uuid, **kwargs):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.datastore = datastore
        self.watch = deepcopy(self.datastore.data['watching'].get(watch_uuid))

    def call_browser(self):

        # Protect against file:// access
        if re.search(r'^file://', self.watch.get('url', '').strip(), re.IGNORECASE):
            if not strtobool(os.getenv('ALLOW_FILE_URI', 'false')):
                raise Exception(
                    "file:// type access is denied for security reasons."
                )

        url = self.watch.link

        # Requests, playwright, other browser via wss:// etc, fetch_extra_something
        prefer_fetch_backend = self.watch.get('fetch_backend', 'system')

        # Proxy ID "key"
        preferred_proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=self.watch.get('uuid'))

        # Pluggable content self.fetcher
        if not prefer_fetch_backend or prefer_fetch_backend == 'system':
            prefer_fetch_backend = self.datastore.data['settings']['application'].get('fetch_backend')

        # In the case that the preferred fetcher was a browser config with a custom connection URL..
        # @todo - on save watch, if it's extra_browser_ then it should be obvious it will use playwright (like if it's requests now..)
        browser_connection_url = None
        if prefer_fetch_backend.startswith('extra_browser_'):
            (t, key) = prefer_fetch_backend.split('extra_browser_')
            connection = list(
                filter(lambda s: (s['browser_name'] == key), self.datastore.data['settings']['requests'].get('extra_browsers', [])))
            if connection:
                prefer_fetch_backend = 'base_html_playwright'
                browser_connection_url = connection[0].get('browser_connection_url')


        # Grab the right kind of 'fetcher' (playwright, requests, etc)
        if hasattr(content_fetcher, prefer_fetch_backend):
            fetcher_obj = getattr(content_fetcher, prefer_fetch_backend)
        else:
            # If the klass doesn't exist, just use a default
            fetcher_obj = getattr(content_fetcher, "html_requests")


        proxy_url = None
        if preferred_proxy_id:
            proxy_url = self.datastore.proxy_list.get(preferred_proxy_id).get('url')
            print(f"Using proxy Key: {preferred_proxy_id} as Proxy URL {proxy_url}")

        # Now call the fetcher (playwright/requests/etc) with arguments that only a fetcher would need.
        # When browser_connection_url is None, the method should default to working out the best defaults (OS env vars etc)
        self.fetcher = fetcher_obj(proxy_override=proxy_url,
                                   browser_connection_url=browser_connection_url
                                   )

        if self.watch.has_browser_steps:
            self.fetcher.browser_steps = self.watch.get('browser_steps', [])
            self.fetcher.browser_steps_screenshot_path = os.path.join(self.datastore.datastore_path, self.watch.get('uuid'))

        # Tweak the base config with the per-watch ones
        request_headers = self.watch.get('headers', [])
        request_headers.update(self.datastore.get_all_base_headers())
        request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=self.watch.get('uuid')))

        # https://github.com/psf/requests/issues/4525
        # Requests doesn't yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
        # do this by accident.
        if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
            request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')

        timeout = self.datastore.data['settings']['requests'].get('timeout')

        request_body = self.watch.get('body')
        request_method = self.watch.get('method')
        ignore_status_codes = self.watch.get('ignore_status_codes', False)

        # Configurable per-watch or global extra delay before extracting text (for webDriver types)
        system_webdriver_delay = self.datastore.data['settings']['application'].get('webdriver_delay', None)
        if self.watch.get('webdriver_delay'):
            self.fetcher.render_extract_delay = self.watch.get('webdriver_delay')
        elif system_webdriver_delay is not None:
            self.fetcher.render_extract_delay = system_webdriver_delay

        if self.watch.get('webdriver_js_execute_code') is not None and self.watch.get('webdriver_js_execute_code').strip():
            self.fetcher.webdriver_js_execute_code = self.watch.get('webdriver_js_execute_code')

        # Requests for PDFs, images etc should be passed the is_binary flag
        is_binary = self.watch.is_pdf

        # And here we go! Call the right browser with browser-specific settings
        self.fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, self.watch.get('include_filters'),
                    is_binary=is_binary)

        #@todo .quit here could go on a close object, so we can run JS if change-detected
        self.fetcher.quit()

        # After init, call run_changedetection() which will do the actual change-detection

    @abstractmethod
    def run_changedetection(self, uuid, skip_when_checksum_same=True):
    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
        update_obj = {'last_notification_error': False, 'last_error': False}
        some_data = 'xxxxx'
        update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest()

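
Putting the refactor together, a hedged sketch of how a concrete processor is now expected to be driven (names follow the diff above; the datastore and uuid are assumed to exist in the caller):

def check_watch(datastore, uuid):
    from changedetectionio.processors import text_json_diff
    # New-style construction: the base class deep-copies the watch by UUID
    update_handler = text_json_diff.perform_site_check(datastore=datastore, watch_uuid=uuid)
    # Shared fetch step (picks requests/playwright/extra_browser_* and proxies)
    update_handler.call_browser()
    # Processor-specific detection runs against the already-fetched content
    return update_handler.run_changedetection(uuid, skip_when_checksum_same=True)
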
@@ -1,7 +1,10 @@

import hashlib
import os
import re
import urllib3
from . import difference_detection_processor
from changedetectionio import content_fetcher
from copy import deepcopy

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@@ -19,7 +22,11 @@ class perform_site_check(difference_detection_processor):
    screenshot = None
    xpath_data = None

    def run_changedetection(self, uuid, skip_when_checksum_same=True):
    def __init__(self, *args, datastore, **kwargs):
        super().__init__(*args, **kwargs)
        self.datastore = datastore

    def run(self, uuid, skip_when_checksum_same=True):

        # DeepCopy so we can be sure we don't accidentally change anything by reference
        watch = deepcopy(self.datastore.data['watching'].get(uuid))
@@ -27,24 +34,84 @@ class perform_site_check(difference_detection_processor):
        if not watch:
            raise Exception("Watch no longer exists.")

        # Protect against file:// access
        if re.search(r'^file', watch.get('url', ''), re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False):
            raise Exception(
                "file:// type access is denied for security reasons."
            )

        # Unset any existing notification error
        update_obj = {'last_notification_error': False, 'last_error': False}

        self.screenshot = self.fetcher.screenshot
        self.xpath_data = self.fetcher.xpath_data
        request_headers = watch.get('headers', [])
        request_headers.update(self.datastore.get_all_base_headers())
        request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=uuid))

        # https://github.com/psf/requests/issues/4525
        # Requests doesn't yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
        # do this by accident.
        if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
            request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')

        timeout = self.datastore.data['settings']['requests'].get('timeout')

        url = watch.link

        request_body = self.datastore.data['watching'][uuid].get('body')
        request_method = self.datastore.data['watching'][uuid].get('method')
        ignore_status_codes = self.datastore.data['watching'][uuid].get('ignore_status_codes', False)

        # Pluggable content fetcher
        prefer_backend = watch.get_fetch_backend
        if not prefer_backend or prefer_backend == 'system':
            prefer_backend = self.datastore.data['settings']['application']['fetch_backend']

        if hasattr(content_fetcher, prefer_backend):
            klass = getattr(content_fetcher, prefer_backend)
        else:
            # If the klass doesn't exist, just use a default
            klass = getattr(content_fetcher, "html_requests")

        proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)
        proxy_url = None
        if proxy_id:
            proxy_url = self.datastore.proxy_list.get(proxy_id).get('url')
            print("UUID {} Using proxy {}".format(uuid, proxy_url))

        fetcher = klass(proxy_override=proxy_url)

        # Configurable per-watch or global extra delay before extracting text (for webDriver types)
        system_webdriver_delay = self.datastore.data['settings']['application'].get('webdriver_delay', None)
        if watch['webdriver_delay'] is not None:
            fetcher.render_extract_delay = watch.get('webdriver_delay')
        elif system_webdriver_delay is not None:
            fetcher.render_extract_delay = system_webdriver_delay

        # Could be removed if requests/plaintext could also return some info?
        if prefer_backend != 'html_webdriver':
            raise Exception("Re-stock detection requires Chrome or compatible webdriver/playwright fetcher to work")

        if watch.get('webdriver_js_execute_code') is not None and watch.get('webdriver_js_execute_code').strip():
            fetcher.webdriver_js_execute_code = watch.get('webdriver_js_execute_code')

        fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'))
        fetcher.quit()

        self.screenshot = fetcher.screenshot
        self.xpath_data = fetcher.xpath_data

        # Track the content type
        update_obj['content_type'] = self.fetcher.headers.get('Content-Type', '')
        update_obj["last_check_status"] = self.fetcher.get_last_status_code()
        update_obj['content_type'] = fetcher.headers.get('Content-Type', '')
        update_obj["last_check_status"] = fetcher.get_last_status_code()

        # Main detection method
        fetched_md5 = None
        if self.fetcher.instock_data:
            fetched_md5 = hashlib.md5(self.fetcher.instock_data.encode('utf-8')).hexdigest()
        if fetcher.instock_data:
            fetched_md5 = hashlib.md5(fetcher.instock_data.encode('utf-8')).hexdigest()
            # 'Possibly in stock' comes from stock-not-in-stock.js when no string is found above the fold.
            update_obj["in_stock"] = True if self.fetcher.instock_data == 'Possibly in stock' else False
            update_obj["in_stock"] = True if fetcher.instock_data == 'Possibly in stock' else False
        else:
            raise UnableToExtractRestockData(status_code=self.fetcher.status_code)
            raise UnableToExtractRestockData(status_code=fetcher.status_code)

        # The main thing that all this at the moment comes down to :)
        changed_detected = False
@@ -61,4 +128,4 @@ class perform_site_check(difference_detection_processor):
        # Always record the new checksum
        update_obj["previous_md5"] = fetched_md5

        return changed_detected, update_obj, self.fetcher.instock_data.encode('utf-8')
        return changed_detected, update_obj, fetcher.instock_data.encode('utf-8')

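
The restock decision above reduces to hashing the fetcher's instock_data string and mapping it to a boolean; a minimal standalone sketch:

import hashlib

def interpret_instock(instock_data):
    if not instock_data:
        raise ValueError("Unable to extract restock data")  # stands in for UnableToExtractRestockData
    fetched_md5 = hashlib.md5(instock_data.encode('utf-8')).hexdigest()
    # 'Possibly in stock' is emitted by stock-not-in-stock.js when no
    # out-of-stock phrase is found above the fold
    return instock_data == 'Possibly in stock', fetched_md5

print(interpret_instock('Possibly in stock')[0])  # True
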
@@ -1,4 +1,4 @@
 | 
			
		||||
# HTML to TEXT/JSON DIFFERENCE self.fetcher
 | 
			
		||||
# HTML to TEXT/JSON DIFFERENCE FETCHER
 | 
			
		||||
 | 
			
		||||
import hashlib
 | 
			
		||||
import json
 | 
			
		||||
@@ -11,7 +11,7 @@ from changedetectionio import content_fetcher, html_tools
 | 
			
		||||
from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
 | 
			
		||||
from copy import deepcopy
 | 
			
		||||
from . import difference_detection_processor
 | 
			
		||||
from ..html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text
 | 
			
		||||
from ..html_tools import PERL_STYLE_REGEX
 | 
			
		||||
 | 
			
		||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 | 
			
		||||
 | 
			
		||||
@@ -32,10 +32,15 @@ class PDFToHTMLToolNotFound(ValueError):
 | 
			
		||||
# Some common stuff here that can be moved to a base class
 | 
			
		||||
# (set_proxy_from_list)
 | 
			
		||||
class perform_site_check(difference_detection_processor):
 | 
			
		||||
    screenshot = None
 | 
			
		||||
    xpath_data = None
 | 
			
		||||
 | 
			
		||||
    def run_changedetection(self, uuid, skip_when_checksum_same=True):
 | 
			
		||||
    def __init__(self, *args, datastore, **kwargs):
 | 
			
		||||
        super().__init__(*args, **kwargs)
 | 
			
		||||
        self.datastore = datastore
 | 
			
		||||
 | 
			
		||||
    def run(self, uuid, skip_when_checksum_same=True, preferred_proxy=None):
 | 
			
		||||
        changed_detected = False
 | 
			
		||||
        html_content = ""
 | 
			
		||||
        screenshot = False  # as bytes
 | 
			
		||||
        stripped_text_from_html = ""
 | 
			
		||||
 | 
			
		||||
@@ -44,25 +49,100 @@ class perform_site_check(difference_detection_processor):
 | 
			
		||||
        if not watch:
 | 
			
		||||
            raise Exception("Watch no longer exists.")
 | 
			
		||||
 | 
			
		||||
        # Protect against file:// access
 | 
			
		||||
        if re.search(r'^file', watch.get('url', ''), re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False):
 | 
			
		||||
            raise Exception(
 | 
			
		||||
                "file:// type access is denied for security reasons."
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
        # Unset any existing notification error
 | 
			
		||||
        update_obj = {'last_notification_error': False, 'last_error': False}
 | 
			
		||||
 | 
			
		||||
        # Tweak the base config with the per-watch ones
 | 
			
		||||
        request_headers = watch.get('headers', [])
 | 
			
		||||
        request_headers.update(self.datastore.get_all_base_headers())
 | 
			
		||||
        request_headers.update(self.datastore.get_all_headers_in_textfile_for_watch(uuid=uuid))
 | 
			
		||||
 | 
			
		||||
        # https://github.com/psf/requests/issues/4525
 | 
			
		||||
        # Requests doesnt yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
 | 
			
		||||
        # do this by accident.
 | 
			
		||||
        if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
 | 
			
		||||
            request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')
 | 
			
		||||
 | 
			
		||||
        timeout = self.datastore.data['settings']['requests'].get('timeout')
 | 
			
		||||
 | 
			
		||||
        url = watch.link
 | 
			
		||||
 | 
			
		||||
        self.screenshot = self.fetcher.screenshot
 | 
			
		||||
        self.xpath_data = self.fetcher.xpath_data
 | 
			
		||||
        request_body = self.datastore.data['watching'][uuid].get('body')
 | 
			
		||||
        request_method = self.datastore.data['watching'][uuid].get('method')
 | 
			
		||||
        ignore_status_codes = self.datastore.data['watching'][uuid].get('ignore_status_codes', False)
 | 
			
		||||
 | 
			
		||||
        # source: support
 | 
			
		||||
        is_source = False
 | 
			
		||||
        if url.startswith('source:'):
 | 
			
		||||
            url = url.replace('source:', '')
 | 
			
		||||
            is_source = True
 | 
			
		||||
 | 
			
		||||
        # Pluggable content fetcher
 | 
			
		||||
        prefer_backend = watch.get_fetch_backend
 | 
			
		||||
        if not prefer_backend or prefer_backend == 'system':
 | 
			
		||||
            prefer_backend = self.datastore.data['settings']['application']['fetch_backend']
 | 
			
		||||
 | 
			
		||||
        if hasattr(content_fetcher, prefer_backend):
 | 
			
		||||
            klass = getattr(content_fetcher, prefer_backend)
 | 
			
		||||
        else:
 | 
			
		||||
            # If the klass doesnt exist, just use a default
 | 
			
		||||
            klass = getattr(content_fetcher, "html_requests")

        if preferred_proxy:
            proxy_id = preferred_proxy
        else:
            proxy_id = self.datastore.get_preferred_proxy_for_watch(uuid=uuid)

        proxy_url = None
        if proxy_id:
            proxy_url = self.datastore.proxy_list.get(proxy_id).get('url')
            print("UUID {} Using proxy {}".format(uuid, proxy_url))

        fetcher = klass(proxy_override=proxy_url)

        # Configurable per-watch or global extra delay before extracting text (for webDriver types)
        system_webdriver_delay = self.datastore.data['settings']['application'].get('webdriver_delay', None)
        if watch['webdriver_delay'] is not None:
            fetcher.render_extract_delay = watch.get('webdriver_delay')
        elif system_webdriver_delay is not None:
            fetcher.render_extract_delay = system_webdriver_delay

        # Possible conflict
        if prefer_backend == 'html_webdriver':
            fetcher.browser_steps = watch.get('browser_steps', None)
            fetcher.browser_steps_screenshot_path = os.path.join(self.datastore.datastore_path, uuid)

        if watch.get('webdriver_js_execute_code') is not None and watch.get('webdriver_js_execute_code').strip():
            fetcher.webdriver_js_execute_code = watch.get('webdriver_js_execute_code')

        # Requests for PDFs, images etc should be passed the is_binary flag
        is_binary = watch.is_pdf

        fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch.get('include_filters'),
                    is_binary=is_binary)
        fetcher.quit()

        self.screenshot = fetcher.screenshot
        self.xpath_data = fetcher.xpath_data

        # Track the content type
        update_obj['content_type'] = self.fetcher.get_all_headers().get('content-type', '').lower()
        update_obj['content_type'] = fetcher.get_all_headers().get('content-type', '').lower()

        # Watches added automatically in the queue manager will skip if it's the same checksum as the previous run
        # Saves a lot of CPU
        update_obj['previous_md5_before_filters'] = hashlib.md5(self.fetcher.content.encode('utf-8')).hexdigest()
        update_obj['previous_md5_before_filters'] = hashlib.md5(fetcher.content.encode('utf-8')).hexdigest()
        if skip_when_checksum_same:
            if update_obj['previous_md5_before_filters'] == watch.get('previous_md5_before_filters'):
                raise content_fetcher.checksumFromPreviousCheckWasTheSame()
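
A minimal sketch of the early-exit idea above, detached from the datastore (the exception class here is a stand-in for content_fetcher.checksumFromPreviousCheckWasTheSame):

import hashlib

class ChecksumUnchanged(Exception):
    """Raised to short-circuit a check when the raw content is identical."""

def check(content, previous_md5, skip_when_checksum_same=True):
    md5 = hashlib.md5(content.encode('utf-8')).hexdigest()
    # Comparing the pre-filter checksum lets the worker skip all filtering work
    if skip_when_checksum_same and md5 == previous_md5:
        raise ChecksumUnchanged()
    return md5

md5 = check("<html>v1</html>", previous_md5=None)
try:
    check("<html>v1</html>", previous_md5=md5)
except ChecksumUnchanged:
    print("skipped - same checksum as the previous run")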

        # Fetching complete, now filters
        # @todo move to class / maybe inside of fetcher abstract base?

        # @note: I feel like the following should be in a more obvious chain system
        #  - Check filter text
@@ -71,24 +151,15 @@ class perform_site_check(difference_detection_processor):
        # https://stackoverflow.com/questions/41817578/basic-method-chaining ?
        # return content().textfilter().jsonextract().checksumcompare() ?

        is_json = 'application/json' in self.fetcher.get_all_headers().get('content-type', '').lower()
        is_json = 'application/json' in fetcher.get_all_headers().get('content-type', '').lower()
        is_html = not is_json
        is_rss = False

        ctype_header = self.fetcher.get_all_headers().get('content-type', '').lower()
        # Go into RSS preprocess for converting CDATA/comment to usable text
        if any(substring in ctype_header for substring in ['application/xml', 'application/rss', 'text/xml']):
            if '<rss' in self.fetcher.content[:100].lower():
                self.fetcher.content = cdata_in_document_to_text(html_content=self.fetcher.content)
                is_rss = True

        # source: support, basically treat it as plaintext
        if watch.is_source_type_url:
        if is_source:
            is_html = False
            is_json = False

        inline_pdf = self.fetcher.get_all_headers().get('content-disposition', '') and '%PDF-1' in self.fetcher.content[:10]
        if watch.is_pdf or 'application/pdf' in self.fetcher.get_all_headers().get('content-type', '').lower() or inline_pdf:
        if watch.is_pdf or 'application/pdf' in fetcher.get_all_headers().get('content-type', '').lower():
            from shutil import which
            tool = os.getenv("PDF_TO_HTML_TOOL", "pdftohtml")
            if not which(tool):
@@ -99,18 +170,18 @@ class perform_site_check(difference_detection_processor):
                [tool, '-stdout', '-', '-s', 'out.pdf', '-i'],
                stdout=subprocess.PIPE,
                stdin=subprocess.PIPE)
            proc.stdin.write(self.fetcher.raw_content)
            proc.stdin.write(fetcher.raw_content)
            proc.stdin.close()
            self.fetcher.content = proc.stdout.read().decode('utf-8')
            fetcher.content = proc.stdout.read().decode('utf-8')
            proc.wait(timeout=60)

            # Add a little metadata so we know if the file changes (like if an image changes, but the text is the same)
            # @todo may cause problems with non-UTF8?
            metadata = "<p>Added by changedetection.io: Document checksum - {} Filesize - {} bytes</p>".format(
                hashlib.md5(self.fetcher.raw_content).hexdigest().upper(),
                len(self.fetcher.content))
                hashlib.md5(fetcher.raw_content).hexdigest().upper(),
                len(fetcher.content))

            self.fetcher.content = self.fetcher.content.replace('</body>', metadata + '</body>')
            fetcher.content = fetcher.content.replace('</body>', metadata + '</body>')
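
A standalone sketch of the pdftohtml pipe above, mirroring the same flags (assumes the pdftohtml binary from poppler-utils is on PATH; the input file name is hypothetical):

import subprocess
from shutil import which

assert which('pdftohtml'), "pdftohtml (poppler-utils) is not installed"

with open('sample.pdf', 'rb') as f:
    raw_pdf = f.read()

# '-stdout -' streams HTML to stdout while reading the PDF from stdin,
# so no temporary files are needed; communicate() avoids pipe deadlocks.
proc = subprocess.Popen(
    ['pdftohtml', '-stdout', '-', '-s', 'out.pdf', '-i'],
    stdout=subprocess.PIPE,
    stdin=subprocess.PIPE)
html, _ = proc.communicate(input=raw_pdf, timeout=60)
print(html.decode('utf-8')[:200])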

        # Better would be if Watch.model could access the global data also
        # and then use getattr https://docs.python.org/3/reference/datamodel.html#object.__getitem__
@@ -137,7 +208,7 @@ class perform_site_check(difference_detection_processor):
        if is_json:
            # Sort the JSON so we don't get false alerts when the content is just re-ordered
            try:
                self.fetcher.content = json.dumps(json.loads(self.fetcher.content), sort_keys=True)
                fetcher.content = json.dumps(json.loads(fetcher.content), sort_keys=True)
            except Exception as e:
                # Might have just been a snippet, or otherwise bad JSON, continue
                pass
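
The sort_keys=True round-trip above canonicalises the payload so that key order alone never registers as a change; a quick demonstration:

import json

a = '{"price": 10, "name": "widget"}'
b = '{"name": "widget", "price": 10}'  # same data, different key order

def canon(s):
    # Parse then re-serialise with sorted keys to get a stable representation
    return json.dumps(json.loads(s), sort_keys=True)

print(canon(a) == canon(b))  # True - no false change alert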
@@ -145,22 +216,22 @@ class perform_site_check(difference_detection_processor):
        if has_filter_rule:
            for filter in include_filters_rule:
                if any(prefix in filter for prefix in json_filter_prefixes):
                    stripped_text_from_html += html_tools.extract_json_as_string(content=self.fetcher.content, json_filter=filter)
                    stripped_text_from_html += html_tools.extract_json_as_string(content=fetcher.content, json_filter=filter)
                    is_html = False

        if is_html or watch.is_source_type_url:
        if is_html or is_source:

            # CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
            self.fetcher.content = html_tools.workarounds_for_obfuscations(self.fetcher.content)
            html_content = self.fetcher.content
            fetcher.content = html_tools.workarounds_for_obfuscations(fetcher.content)
            html_content = fetcher.content

            # If not JSON, and if it's not text/plain..
            if 'text/plain' in self.fetcher.get_all_headers().get('content-type', '').lower():
            if 'text/plain' in fetcher.get_all_headers().get('content-type', '').lower():
                # Don't run get_text or xpath/css filters on plaintext
                stripped_text_from_html = html_content
            else:
                # Does it have some ld+json price data? used for easier monitoring
                update_obj['has_ldjson_price_data'] = html_tools.has_ldjson_product_info(self.fetcher.content)
                update_obj['has_ldjson_price_data'] = html_tools.has_ldjson_product_info(fetcher.content)

                # Then we assume HTML
                if has_filter_rule:
@@ -170,19 +241,13 @@ class perform_site_check(difference_detection_processor):
                        # For HTML/XML we offer xpath as an option, just start a regular xPath "/.."
                        if filter_rule[0] == '/' or filter_rule.startswith('xpath:'):
                            html_content += html_tools.xpath_filter(xpath_filter=filter_rule.replace('xpath:', ''),
                                                                    html_content=self.fetcher.content,
                                                                    append_pretty_line_formatting=not watch.is_source_type_url,
                                                                    is_rss=is_rss)
                        elif filter_rule.startswith('xpath1:'):
                            html_content += html_tools.xpath1_filter(xpath_filter=filter_rule.replace('xpath1:', ''),
                                                                    html_content=self.fetcher.content,
                                                                    append_pretty_line_formatting=not watch.is_source_type_url,
                                                                    is_rss=is_rss)
                                                                    html_content=fetcher.content,
                                                                    append_pretty_line_formatting=not is_source)
                        else:
                            # CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
                            html_content += html_tools.include_filters(include_filters=filter_rule,
                                                                       html_content=self.fetcher.content,
                                                                       append_pretty_line_formatting=not watch.is_source_type_url)
                                                                       html_content=fetcher.content,
                                                                       append_pretty_line_formatting=not is_source)

                    if not html_content.strip():
                        raise FilterNotFoundInResponse(include_filters_rule)
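
A reduced sketch of the prefix routing above: rules beginning with '/' or 'xpath:' go to an XPath engine, anything else is treated as a CSS selector (uses lxml plus the cssselect package; the HTML snippet and rules are hypothetical):

from lxml import etree, html

doc = "<html><body><div id='price'>10 EUR</div></body></html>"

def apply_filter(content, filter_rule):
    tree = html.fromstring(content)
    # Route on the rule prefix, like the processor above
    if filter_rule.startswith('/') or filter_rule.startswith('xpath:'):
        nodes = tree.xpath(filter_rule.replace('xpath:', ''))
    else:
        nodes = tree.cssselect(filter_rule)
    return ''.join(etree.tostring(n, encoding='unicode') for n in nodes)

print(apply_filter(doc, 'xpath://div[@id="price"]'))  # XPath route
print(apply_filter(doc, 'div#price'))                 # CSS route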
@@ -190,16 +255,15 @@ class perform_site_check(difference_detection_processor):
                if has_subtractive_selectors:
                    html_content = html_tools.element_removal(subtractive_selectors, html_content)

                if watch.is_source_type_url:
                if is_source:
                    stripped_text_from_html = html_content
                else:
                    # extract text
                    do_anchor = self.datastore.data["settings"]["application"].get("render_anchor_tag_content", False)
                    stripped_text_from_html = \
                        html_tools.html_to_text(
                            html_content=html_content,
                            render_anchor_tag_content=do_anchor,
                            is_rss=is_rss # #1874 activate the <title workaround hack
                            html_content,
                            render_anchor_tag_content=do_anchor
                        )

        # Re #340 - return the content before the 'ignore text' was applied
@@ -236,7 +300,7 @@ class perform_site_check(difference_detection_processor):
        empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)
        if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0:
            raise content_fetcher.ReplyWithContentButNoText(url=url,
                                                            status_code=self.fetcher.get_last_status_code(),
                                                            status_code=fetcher.get_last_status_code(),
                                                            screenshot=screenshot,
                                                            has_filters=has_filter_rule,
                                                            html_content=html_content
@@ -245,7 +309,7 @@ class perform_site_check(difference_detection_processor):
        # We rely on the actual text in the html output.. many sites have random script vars etc,
        # in the future we'll implement other mechanisms.

        update_obj["last_check_status"] = self.fetcher.get_last_status_code()
        update_obj["last_check_status"] = fetcher.get_last_status_code()

        # If there's text to skip
        # @todo we could abstract out the get_text() to handle this cleaner
@@ -333,7 +397,7 @@ class perform_site_check(difference_detection_processor):
        if is_html:
            if self.datastore.data['settings']['application'].get('extract_title_as_title') or watch['extract_title_as_title']:
                if not watch['title'] or not len(watch['title']):
                    update_obj['title'] = html_tools.extract_element(find='title', html_content=self.fetcher.content)
                    update_obj['title'] = html_tools.extract_element(find='title', html_content=fetcher.content)

        if changed_detected:
            if watch.get('check_unique_lines', False):

@@ -170,12 +170,9 @@ if (include_filters.length) {

        try {
            // is it xpath?
            if (f.startsWith('/') || f.startsWith('xpath')) {
                var qry_f = f.replace(/xpath(:|\d:)/, '')
                console.log("[xpath] Scanning for included filter " + qry_f)
                q = document.evaluate(qry_f, document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue;
            if (f.startsWith('/') || f.startsWith('xpath:')) {
                q = document.evaluate(f.replace('xpath:', ''), document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue;
            } else {
                console.log("[css] Scanning for included filter " + f)
                q = document.querySelector(f);
            }
        } catch (e) {
@@ -185,18 +182,8 @@ if (include_filters.length) {
        }

        if (q) {
            // Try to resolve //something/text() back to its /something so we can at least get the bounding box
            try {
                if (typeof q.nodeName == 'string' && q.nodeName === '#text') {
                    q = q.parentElement
                }
            } catch (e) {
                console.log(e)
                console.log("xpath_element_scraper: #text resolver")
            }

            // #1231 - In the case an XPath attribute filter is applied, we will have to traverse up and find the element.
            if (typeof q.getBoundingClientRect == 'function') {
            if (q.hasOwnProperty('getBoundingClientRect')) {
                bbox = q.getBoundingClientRect();
                console.log("xpath_element_scraper: Got filter element, scroll from top was " + scroll_y)
            } else {
@@ -205,8 +192,7 @@ if (include_filters.length) {
                    bbox = q.ownerElement.getBoundingClientRect();
                    console.log("xpath_element_scraper: Got filter by ownerElement element, scroll from top was " + scroll_y)
                } catch (e) {
                    console.log(e)
                    console.log("xpath_element_scraper: error looking up q.ownerElement")
                    console.log("xpath_element_scraper: error looking up ownerElement")
                }
            }
        }

@@ -1,44 +0,0 @@
#!/bin/bash

# run some tests and look if the 'custom-browser-search-string=1' connect string appeared in the correct containers

# enable debug
set -x

# An extra browser is configured, but we never chose to use it, so it should NOT show in the logs
docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio;pytest tests/custom_browser_url/test_custom_browser_url.py::test_request_not_via_custom_browser_url'
docker logs browserless-custom-url &>log.txt
grep 'custom-browser-search-string=1' log.txt
if [ $? -ne 1 ]
then
  echo "Saw a request in 'browserless-custom-url' container with 'custom-browser-search-string=1' when I should not"
  exit 1
fi

docker logs browserless &>log.txt
grep 'custom-browser-search-string=1' log.txt
if [ $? -ne 1 ]
then
  echo "Saw a request in 'browser' container with 'custom-browser-search-string=1' when I should not"
  exit 1
fi

# Special connect string should appear in the custom-url container, but not in the 'default' one
docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio  bash -c 'cd changedetectionio;pytest tests/custom_browser_url/test_custom_browser_url.py::test_request_via_custom_browser_url'
docker logs browserless-custom-url &>log.txt
grep 'custom-browser-search-string=1' log.txt
if [ $? -ne 0 ]
then
  echo "Did not see request in 'browserless-custom-url' container with 'custom-browser-search-string=1' when I should"
  exit 1
fi

docker logs browserless &>log.txt
grep 'custom-browser-search-string=1' log.txt
if [ $? -ne 1 ]
then
  echo "Saw a request in 'browser' container with 'custom-browser-search-string=1' when I should not"
  exit 1
fi

@@ -321,14 +321,8 @@ $(document).ready(function () {
            var s = '<div class="control">' + '<a data-step-index=' + i + ' class="pure-button button-secondary button-green button-xsmall apply" >Apply</a> ';
            if (i > 0) {
                // The first step never gets these (Goto-site)
                s += `<a data-step-index="${i}" class="pure-button button-secondary button-xsmall clear" >Clear</a> ` +
                    `<a data-step-index="${i}" class="pure-button button-secondary button-red button-xsmall remove" >Remove</a>`;

                // if a screenshot is available
                if (browser_steps_available_screenshots.includes(i.toString())) {
                    var d = (browser_steps_last_error_step === i+1) ? 'before' : 'after';
                    s += ` <a data-step-index="${i}" class="pure-button button-secondary button-xsmall show-screenshot" title="Show screenshot from last run" data-type="${d}">Pic</a> `;
                }
                s += '<a data-step-index=' + i + ' class="pure-button button-secondary button-xsmall clear" >Clear</a> ' +
                    '<a data-step-index=' + i + ' class="pure-button button-secondary button-red button-xsmall remove" >Remove</a>';
            }
            s += '</div>';
            $(this).append(s)
@@ -443,24 +437,6 @@ $(document).ready(function () {

    });

    $('ul#browser_steps li .control .show-screenshot').click(function (element) {
        var step_n = $(event.currentTarget).data('step-index');
        w = window.open(this.href, "_blank", "width=640,height=480");
        const t = $(event.currentTarget).data('type');

        const url = browser_steps_fetch_screenshot_image_url + `&step_n=${step_n}&type=${t}`;
        w.document.body.innerHTML = `<!DOCTYPE html>
            <html lang="en">
                <body>
                    <img src="${url}" style="width: 100%" alt="Browser Step at step ${step_n} from last run." title="Browser Step at step ${step_n} from last run."/>
                </body>
        </html>`;
        w.document.title = `Browser Step at step ${step_n} from last run.`;
    });

    if (browser_steps_last_error_step) {
        $("ul#browser_steps>li:nth-child("+browser_steps_last_error_step+")").addClass("browser-step-with-error");
    }

    $("ul#browser_steps select").change(function () {
        set_greyed_state();

@@ -1,120 +1,110 @@
$(document).ready(function () {
    var a = document.getElementById("a");
    var b = document.getElementById("b");
    var result = document.getElementById("result");
    var inputs;
var a = document.getElementById("a");
var b = document.getElementById("b");
var result = document.getElementById("result");

    $('#jump-next-diff').click(function () {
function changed() {
  // https://github.com/kpdecker/jsdiff/issues/389
  // I would love to use `{ignoreWhitespace: true}` here but it breaks the formatting
  options = {
    ignoreWhitespace: document.getElementById("ignoreWhitespace").checked,
  };

        var element = inputs[inputs.current];
        var headerOffset = 80;
        var elementPosition = element.getBoundingClientRect().top;
        var offsetPosition = elementPosition - headerOffset + window.scrollY;

        window.scrollTo({
            top: offsetPosition,
            behavior: "smooth",
        });

        inputs.current++;
        if (inputs.current >= inputs.length) {
            inputs.current = 0;
        }
    });

    function changed() {
        // https://github.com/kpdecker/jsdiff/issues/389
        // I would love to use `{ignoreWhitespace: true}` here but it breaks the formatting
        options = {
            ignoreWhitespace: document.getElementById("ignoreWhitespace").checked,
        };

        var diff = Diff[window.diffType](a.textContent, b.textContent, options);
        var fragment = document.createDocumentFragment();
        for (var i = 0; i < diff.length; i++) {
            if (diff[i].added && diff[i + 1] && diff[i + 1].removed) {
                var swap = diff[i];
                diff[i] = diff[i + 1];
                diff[i + 1] = swap;
            }

            var node;
            if (diff[i].removed) {
                node = document.createElement("del");
                node.classList.add("change");
                const wrapper = node.appendChild(document.createElement("span"));
                wrapper.appendChild(document.createTextNode(diff[i].value));
            } else if (diff[i].added) {
                node = document.createElement("ins");
                node.classList.add("change");
                const wrapper = node.appendChild(document.createElement("span"));
                wrapper.appendChild(document.createTextNode(diff[i].value));
            } else {
                node = document.createTextNode(diff[i].value);
            }
            fragment.appendChild(node);
        }

        result.textContent = "";
        result.appendChild(fragment);

        // For nice mouse-over hover/title information
        const removed_current_option = $('#diff-version option:selected')
        if (removed_current_option) {
            $('del').each(function () {
                $(this).prop('title', 'Removed '+removed_current_option[0].label);
            });
        }
        const inserted_current_option = $('#current-version option:selected')
        if (removed_current_option) {
            $('ins').each(function () {
                $(this).prop('title', 'Inserted '+inserted_current_option[0].label);
            });
        }
        // Set the list of possible differences to jump to
        inputs = document.querySelectorAll('#diff-ui .change')
        // Set the "current" diff pointer
        inputs.current = 0;
        // Goto diff
        $('#jump-next-diff').click();
  var diff = Diff[window.diffType](a.textContent, b.textContent, options);
  var fragment = document.createDocumentFragment();
  for (var i = 0; i < diff.length; i++) {
    if (diff[i].added && diff[i + 1] && diff[i + 1].removed) {
      var swap = diff[i];
      diff[i] = diff[i + 1];
      diff[i + 1] = swap;
    }

    $('.needs-localtime').each(function () {
        for (var option of this.options) {
            var dateObject = new Date(option.value * 1000);
            option.label = dateObject.toLocaleString(undefined, {dateStyle: "full", timeStyle: "medium"});
        }
    })
    onDiffTypeChange(
        document.querySelector('#settings [name="diff_type"]:checked'),
    );
    changed();

    a.onpaste = a.onchange = b.onpaste = b.onchange = changed;

    if ("oninput" in a) {
        a.oninput = b.oninput = changed;
    var node;
    if (diff[i].removed) {
      node = document.createElement("del");
      node.classList.add("change");
      const wrapper = node.appendChild(document.createElement("span"));
      wrapper.appendChild(document.createTextNode(diff[i].value));
    } else if (diff[i].added) {
      node = document.createElement("ins");
      node.classList.add("change");
      const wrapper = node.appendChild(document.createElement("span"));
      wrapper.appendChild(document.createTextNode(diff[i].value));
    } else {
        a.onkeyup = b.onkeyup = changed;
      node = document.createTextNode(diff[i].value);
    }
    fragment.appendChild(node);
  }

    function onDiffTypeChange(radio) {
        window.diffType = radio.value;
        // Not necessary
        //	document.title = "Diff " + radio.value.slice(4);
  result.textContent = "";
  result.appendChild(fragment);

  // Jump at start
  inputs.current = 0;
  next_diff();
}

window.onload = function () {
  /* Convert what is options from UTC time.time() to local browser time */
  var diffList = document.getElementById("diff-version");
  if (typeof diffList != "undefined" && diffList != null) {
    for (var option of diffList.options) {
      var dateObject = new Date(option.value * 1000);
      option.label = dateObject.toLocaleString();
    }
  }

    var radio = document.getElementsByName("diff_type");
    for (var i = 0; i < radio.length; i++) {
        radio[i].onchange = function (e) {
            onDiffTypeChange(e.target);
            changed();
        };
    }
  /* Set current version date as local time in the browser also */
  var current_v = document.getElementById("current-v-date");
  var dateObject = new Date(newest_version_timestamp * 1000);
  current_v.innerHTML = dateObject.toLocaleString();
  onDiffTypeChange(
    document.querySelector('#settings [name="diff_type"]:checked'),
  );
  changed();
};

    document.getElementById("ignoreWhitespace").onchange = function (e) {
        changed();
    };
a.onpaste = a.onchange = b.onpaste = b.onchange = changed;

});
if ("oninput" in a) {
  a.oninput = b.oninput = changed;
} else {
  a.onkeyup = b.onkeyup = changed;
}

function onDiffTypeChange(radio) {
  window.diffType = radio.value;
  // Not necessary
  //	document.title = "Diff " + radio.value.slice(4);
}

var radio = document.getElementsByName("diff_type");
for (var i = 0; i < radio.length; i++) {
  radio[i].onchange = function (e) {
    onDiffTypeChange(e.target);
    changed();
  };
}

document.getElementById("ignoreWhitespace").onchange = function (e) {
  changed();
};

var inputs = document.getElementsByClassName("change");
inputs.current = 0;

function next_diff() {
  var element = inputs[inputs.current];
  var headerOffset = 80;
  var elementPosition = element.getBoundingClientRect().top;
  var offsetPosition = elementPosition - headerOffset + window.scrollY;

  window.scrollTo({
    top: offsetPosition,
    behavior: "smooth",
  });

  inputs.current++;
  if (inputs.current >= inputs.length) {
    inputs.current = 0;
  }
}

@@ -3,50 +3,45 @@
 * Toggles theme between light and dark mode.
 */
$(document).ready(function () {
    const button = document.getElementById("toggle-light-mode");
  const button = document.getElementById("toggle-light-mode");

    button.onclick = () => {
        const htmlElement = document.getElementsByTagName("html");
        const isDarkMode = htmlElement[0].dataset.darkmode === "true";
        htmlElement[0].dataset.darkmode = !isDarkMode;
        setCookieValue(!isDarkMode);
    };
  button.onclick = () => {
    const htmlElement = document.getElementsByTagName("html");
    const isDarkMode = htmlElement[0].dataset.darkmode === "true";
    htmlElement[0].dataset.darkmode = !isDarkMode;
    setCookieValue(!isDarkMode);
  };

    const setCookieValue = (value) => {
        document.cookie = `css_dark_mode=${value};max-age=31536000;path=/`
    }
  const setCookieValue = (value) => {
    document.cookie = `css_dark_mode=${value};max-age=31536000;path=/`
  }

    // Search input box behaviour
  // Search input box behaviour
    const toggle_search = document.getElementById("toggle-search");
    const search_q = document.getElementById("search-q");
    if(search_q) {
      window.addEventListener('keydown', function (e) {
        if (e.altKey == true && e.keyCode == 83) {
          search_q.classList.toggle('expanded');
          search_q.focus();
        }
      });
  const search_q = document.getElementById("search-q");
  window.addEventListener('keydown', function (e) {

      search_q.onkeydown = (e) => {
        var key = e.keyCode || e.which;
        if (key === 13) {
          document.searchForm.submit();
        }
      };
      toggle_search.onclick = () => {
        // Could be that they want to search something once text is in there
        if (search_q.value.length) {
          document.searchForm.submit();
        } else {
          // If not..
          search_q.classList.toggle('expanded');
          search_q.focus();
        }
      };
    if (e.altKey == true && e.keyCode == 83)
      search_q.classList.toggle('expanded');
      search_q.focus();
  });


  search_q.onkeydown = (e) => {
    var key = e.keyCode || e.which;
    if (key === 13) {
      document.searchForm.submit();
    }
  };
  toggle_search.onclick = () => {
    // Could be that they want to search something once text is in there
    if (search_q.value.length) {
      document.searchForm.submit();
    } else {
      // If not..
      search_q.classList.toggle('expanded');
      search_q.focus();
    }
  };

    $('#heart-us').click(function () {
        $("#overlay").toggleClass('visible');
        heartpath.style.fill = document.getElementById("overlay").classList.contains("visible") ? '#ff0000' : 'var(--color-background)';
    });
});

@@ -149,7 +149,7 @@ $(document).ready(function () {
            // @todo In the future paint all that match
            for (const c of current_default_xpath) {
                for (var i = selector_data['size_pos'].length; i !== 0; i--) {
                    if (selector_data['size_pos'][i - 1].xpath.trim() === c.trim()) {
                    if (selector_data['size_pos'][i - 1].xpath === c) {
                        console.log("highlighting " + c);
                        current_selected_i = i - 1;
                        highlight_current_selected_i();

@@ -4,14 +4,6 @@ $(function () {
        $(this).closest('.unviewed').removeClass('unviewed');
    });

    $('td[data-timestamp]').each(function () {
        $(this).prop('title', new Intl.DateTimeFormat(undefined,
            {
                dateStyle: 'full',
                timeStyle: 'long'
            }).format($(this).data('timestamp') * 1000));
    })

    $("#checkbox-assign-tag").click(function (e) {
        $('#op_extradata').val(prompt("Enter a tag name"));
    });

@@ -187,10 +187,6 @@ ins {
    padding: 0.5em; }
  #settings ins {
    padding: 0.5em; }
  #settings option:checked {
    font-weight: bold; }
  #settings [type=radio], #settings [type=checkbox] {
    vertical-align: middle; }

.source {
  position: absolute;

@@ -77,13 +77,6 @@ ins {
  ins {
    padding: 0.5em;
  }

  option:checked {
    font-weight: bold;
  }
  [type=radio],[type=checkbox] {
    vertical-align: middle;
  }
}

.source {

@@ -6,10 +6,6 @@
  }

  li {
    &.browser-step-with-error {
      background-color: #ffd6d6;
      border-radius: 4px;
    }
    &:not(:first-child) {
      &:hover {
        opacity: 1.0;

@@ -1,6 +1,6 @@

#toggle-light-mode {
/*  width: 3rem;*/
  width: 3rem;
  /* default */
  .icon-dark {
    display: none;

@@ -1,24 +0,0 @@
ul#requests-extra_browsers {
  list-style: none;
  /* tidy up the table to look more "inline" */
  li {
    > label {
      display: none;
    }

  }

  /* each proxy entry is a `table` */
  table {
    tr {
      display: inline;
    }
  }
}

#extra-browsers-setting {
  border: 1px solid var(--color-grey-800);
  border-radius: 4px;
  margin: 1em;
  padding: 1em;
}
@@ -60,10 +60,3 @@ body.proxy-check-active {

  padding-bottom: 1em;
}

#extra-proxies-setting {
  border: 1px solid var(--color-grey-800);
  border-radius: 4px;
  margin: 1em;
  padding: 1em;
}

@@ -1,38 +0,0 @@
#overlay {

  opacity: 0.95;
  position: fixed;

  width: 350px;
  max-width: 100%;
  height: 100%;
  top: 0;
  right: -350px;
  background-color: var(--color-table-stripe);
  z-index: 2;

  transform: translateX(0);
  transition: transform .5s ease;


  &.visible {
    transform: translateX(-100%);

  }

  .content {
    font-size: 0.875rem;
    padding: 1rem;
    margin-top: 5rem;
    max-width: 400px;
    color: var(--color-watch-table-row-text);
  }
}

#heartpath {
  &:hover {
    fill: #ff0000 !important;
    transition: all ease 0.3s !important;
  }
  transition: all ease 0.3s !important;
}
@@ -1,25 +0,0 @@
.pure-menu-link {
  padding: 0.5rem 1em;
  line-height: 1.2rem;
}

.pure-menu-item {
  svg {
    height: 1.2rem;
  }
  * {
    vertical-align: middle;
  }
  .github-link {
    height: 1.8rem;
    display: block;
    svg {
      height: 100%;
    }
  }
  .bi-heart {
    &:hover {
      cursor: pointer;
    }
  }
}
@@ -1,28 +0,0 @@

#selector-wrapper {
  height: 100%;
  max-height: 70vh;
  overflow-y: scroll;
  position: relative;

  //width: 100%;
  >img {
    position: absolute;
    z-index: 4;
    max-width: 100%;
  }

  >canvas {
    position: relative;
    z-index: 5;
    max-width: 100%;

    &:hover {
      cursor: pointer;
    }
  }
}

#selector-current-xpath {
  font-size: 80%;
}
@@ -5,18 +5,14 @@
@import "parts/_arrows";
@import "parts/_browser-steps";
@import "parts/_extra_proxies";
@import "parts/_extra_browsers";
@import "parts/_pagination";
@import "parts/_spinners";
@import "parts/_variables";
@import "parts/_darkmode";
@import "parts/_menu";
@import "parts/_love";

body {
  color: var(--color-text);
  background: var(--color-background-page);
  font-family: Helvetica Neue, Helvetica, Lucida Grande, Arial, Ubuntu, Cantarell, Fira Sans, sans-serif;
}

.visually-hidden {
@@ -59,6 +55,11 @@ a.github-link {
  }
}


#toggle-search {
  width: 2rem;
}

#search-q {
  opacity: 0;
  -webkit-transition: all .9s ease;
@@ -470,11 +471,7 @@ footer {
  padding: 10px;

  &#left-sticky {
    left: 0;
    position: fixed;
    border-top-right-radius: 5px;
    border-bottom-right-radius: 5px;
    box-shadow: 1px 1px 4px var(--color-shadow-jump);
    left: 0px;
  }

  &#right-sticky {
@@ -942,7 +939,32 @@ ul {
  }
}

@import "parts/_visualselector";
#selector-wrapper {
  height: 100%;
  overflow-y: scroll;
  position: relative;

  //width: 100%;
  >img {
    position: absolute;
    z-index: 4;
    max-width: 100%;
  }

  >canvas {
    position: relative;
    z-index: 5;
    max-width: 100%;

    &:hover {
      cursor: pointer;
    }
  }
}

#selector-current-xpath {
  font-size: 80%;
}

#webdriver-override-options {
  input[type="number"] {
@@ -1081,4 +1103,3 @@ ul {
  border-radius: 3px;
  white-space: nowrap;
}

@@ -26,9 +26,6 @@
  #browser_steps li {
    list-style: decimal;
    padding: 5px; }
    #browser_steps li.browser-step-with-error {
      background-color: #ffd6d6;
      border-radius: 4px; }
    #browser_steps li:not(:first-child):hover {
      opacity: 1.0; }
    #browser_steps li .control {
@@ -128,27 +125,6 @@ body.proxy-check-active #request .proxy-timing {
    border-radius: 4px;
    padding: 1em; }

#extra-proxies-setting {
  border: 1px solid var(--color-grey-800);
  border-radius: 4px;
  margin: 1em;
  padding: 1em; }

ul#requests-extra_browsers {
  list-style: none;
  /* tidy up the table to look more "inline" */
  /* each proxy entry is a `table` */ }
  ul#requests-extra_browsers li > label {
    display: none; }
  ul#requests-extra_browsers table tr {
    display: inline; }

#extra-browsers-setting {
  border: 1px solid var(--color-grey-800);
  border-radius: 4px;
  margin: 1em;
  padding: 1em; }

.pagination-page-info {
  color: #fff;
  font-size: 0.85rem;
@@ -352,7 +328,7 @@ html[data-darkmode="true"] {
      color: var(--color-watch-table-error); }

#toggle-light-mode {
  /*  width: 3rem;*/
  width: 3rem;
  /* default */ }
  #toggle-light-mode .icon-dark {
    display: none; }
@@ -363,56 +339,9 @@ html[data-darkmode="true"] #toggle-light-mode .icon-light {
html[data-darkmode="true"] #toggle-light-mode .icon-dark {
  display: block; }

.pure-menu-link {
  padding: 0.5rem 1em;
  line-height: 1.2rem; }

.pure-menu-item svg {
  height: 1.2rem; }

.pure-menu-item * {
  vertical-align: middle; }

.pure-menu-item .github-link {
  height: 1.8rem;
  display: block; }
  .pure-menu-item .github-link svg {
    height: 100%; }

.pure-menu-item .bi-heart:hover {
  cursor: pointer; }

#overlay {
  opacity: 0.95;
  position: fixed;
  width: 350px;
  max-width: 100%;
  height: 100%;
  top: 0;
  right: -350px;
  background-color: var(--color-table-stripe);
  z-index: 2;
  transform: translateX(0);
  transition: transform .5s ease; }
  #overlay.visible {
    transform: translateX(-100%); }
  #overlay .content {
    font-size: 0.875rem;
    padding: 1rem;
    margin-top: 5rem;
    max-width: 400px;
    color: var(--color-watch-table-row-text); }

#heartpath {
  transition: all ease 0.3s !important; }
  #heartpath:hover {
    fill: #ff0000 !important;
    transition: all ease 0.3s !important; }

body {
  color: var(--color-text);
  background: var(--color-background-page);
  font-family: Helvetica Neue, Helvetica, Lucida Grande, Arial, Ubuntu, Cantarell, Fira Sans, sans-serif; }
  background: var(--color-background-page); }

.visually-hidden {
  clip: rect(0 0 0 0);
@@ -444,6 +373,9 @@ a.github-link {
  a.github-link:hover {
    color: var(--color-icon-github-hover); }

#toggle-search {
  width: 2rem; }

#search-q {
  opacity: 0;
  -webkit-transition: all .9s ease;
@@ -735,11 +667,7 @@ footer {
  background: var(--color-background);
  padding: 10px; }
  .sticky-tab#left-sticky {
    left: 0;
    position: fixed;
    border-top-right-radius: 5px;
    border-bottom-right-radius: 5px;
    box-shadow: 1px 1px 4px var(--color-shadow-jump); }
    left: 0px; }
  .sticky-tab#right-sticky {
    right: 0px; }
  .sticky-tab#hosted-sticky {
@@ -1048,7 +976,6 @@ ul {

#selector-wrapper {
  height: 100%;
  max-height: 70vh;
  overflow-y: scroll;
  position: relative; }
  #selector-wrapper > img {

@@ -96,14 +96,6 @@ class ChangeDetectionStore:
                self.add_watch(url='https://changedetection.io/CHANGELOG.txt',
                               tag='changedetection.io',
                               extras={'fetch_backend': 'html_requests'})

            updates_available = self.get_updates_available()
            self.__data['settings']['application']['schema_version'] = updates_available.pop()

        else:
            # Bump the update version by running updates
            self.run_updates()

        self.__data['version_tag'] = version_tag

        # Just to test that proxies.json if it exists, doesn't throw a parsing error on startup
@@ -133,6 +125,9 @@ class ChangeDetectionStore:
            secret = secrets.token_hex(16)
            self.__data['settings']['application']['api_access_token'] = secret

        # Bump the update version by running updates
        self.run_updates()

        self.needs_write = True

        # Finally start the thread that will manage periodic data saves to JSON
@@ -244,16 +239,12 @@ class ChangeDetectionStore:
        import pathlib

        self.__data['watching'][uuid].update({
                'browser_steps_last_error_step' : None,
                'check_count': 0,
                'fetch_time' : 0.0,
                'has_ldjson_price_data': None,
                'last_checked': 0,
                'has_ldjson_price_data': None,
                'last_error': False,
                'last_notification_error': False,
                'last_viewed': 0,
                'previous_md5': False,
                'previous_md5_before_filters': False,
                'track_ldjson_price_data': None,
            })

@@ -360,8 +351,6 @@ class ChangeDetectionStore:
        if write_to_disk_now:
            self.sync_to_json()

        print("added ", url)

        return new_uuid

    def visualselector_data_is_ready(self, watch_uuid):
@@ -633,23 +622,17 @@ class ChangeDetectionStore:

        return {}

    @property
    def extra_browsers(self):
        res = []
        p = list(filter(
            lambda s: (s.get('browser_name') and s.get('browser_connection_url')),
            self.__data['settings']['requests'].get('extra_browsers', [])))
        if p:
            for i in p:
                res.append(("extra_browser_"+i['browser_name'], i['browser_name']))

        return res

    def tag_exists_by_name(self, tag_name):
        return any(v.get('title', '').lower() == tag_name.lower() for k, v in self.__data['settings']['application']['tags'].items())

    def get_updates_available(self):
    # Run all updates
    # IMPORTANT - Each update could be run even when they have a new install and the schema is correct
    #             So therefore - each `update_n` should be very careful about checking if it needs to actually run
    #             Probably we should bump the current update schema version with each tag release version?
    def run_updates(self):
        import inspect
        import shutil

        updates_available = []
        for i, o in inspect.getmembers(self, predicate=inspect.ismethod):
            m = re.search(r'update_(\d+)$', i)
@@ -657,15 +640,6 @@ class ChangeDetectionStore:
                updates_available.append(int(m.group(1)))
        updates_available.sort()

        return updates_available
 | 
			
		||||
 | 
			
		||||
    # Run all updates
 | 
			
		||||
    # IMPORTANT - Each update could be run even when they have a new install and the schema is correct
 | 
			
		||||
    #             So therefor - each `update_n` should be very careful about checking if it needs to actually run
 | 
			
		||||
    #             Probably we should bump the current update schema version with each tag release version?
 | 
			
		||||
    def run_updates(self):
 | 
			
		||||
        import shutil
 | 
			
		||||
        updates_available = self.get_updates_available()
 | 
			
		||||
        for update_n in updates_available:
 | 
			
		||||
            if update_n > self.__data['settings']['application']['schema_version']:
 | 
			
		||||
                print ("Applying update_{}".format((update_n)))
 | 
			
		||||
@@ -847,14 +821,4 @@ class ChangeDetectionStore:
 | 
			
		||||
            if not watch.get('date_created'):
 | 
			
		||||
                self.data['watching'][uuid]['date_created'] = i
 | 
			
		||||
            i+=1
 | 
			
		||||
        return
 | 
			
		||||
 | 
			
		||||
    # #1774 - protect xpath1 against migration
 | 
			
		||||
    def update_14(self):
 | 
			
		||||
        for awatch in self.__data["watching"]:
 | 
			
		||||
            if self.__data["watching"][awatch]['include_filters']:
 | 
			
		||||
                for num, selector in enumerate(self.__data["watching"][awatch]['include_filters']):
 | 
			
		||||
                    if selector.startswith('/'):
 | 
			
		||||
                        self.__data["watching"][awatch]['include_filters'][num] = 'xpath1:' + selector
 | 
			
		||||
                    if selector.startswith('xpath:'):
 | 
			
		||||
                        self.__data["watching"][awatch]['include_filters'][num] = selector.replace('xpath:', 'xpath1:', 1)
 | 
			
		||||
        return
 | 
			
		||||
@@ -8,10 +8,10 @@
    <title>Change Detection{{extra_title}}</title>
    <link rel="alternate" type="application/rss+xml" title="Changedetection.io » Feed{% if active_tag %}- {{active_tag}}{% endif %}" href="{{ url_for('rss', tag=active_tag, token=app_rss_token)}}" >
    <link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='pure-min.css')}}" >
    <link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='styles.css')}}?v={{ get_css_version() }}" >
    <link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='styles.css')}}" >
    {% if extra_stylesheets %}
      {% for m in extra_stylesheets %}
        <link rel="stylesheet" href="{{ m }}?ver={{ get_css_version() }}" >
        <link rel="stylesheet" href="{{ m }}?ver=1000" >
      {% endfor %}
    {% endif %}

@@ -85,7 +85,6 @@
              <a href="{{url_for('logout')}}" class="pure-menu-link">LOG OUT</a>
            </li>
          {% endif %}
          {% if current_user.is_authenticated or not has_password %}
          <li class="pure-menu-item pure-form" id="search-menu-item">
            <!-- We use GET here so it offers people a chance to set bookmarks etc -->
            <form name="searchForm" action="" method="GET">
@@ -96,7 +95,6 @@
              </button>
            </form>
          </li>
          {% endif %}
          <li class="pure-menu-item">
            <button class="toggle-button" id="toggle-light-mode" type="button" title="Toggle Light/Dark Mode">
              <span class="visually-hidden">Toggle light/dark mode</span>
@@ -108,20 +106,6 @@
              </span>
            </button>
          </li>
          <li class="pure-menu-item" id="heart-us">
                <svg
                   fill="#ff0000"
                   class="bi bi-heart"
                   preserveAspectRatio="xMidYMid meet"
                   viewBox="0 0 16.9 16.1"
                   id="svg-heart"
                   xmlns="http://www.w3.org/2000/svg"
                   xmlns:svg="http://www.w3.org/2000/svg">
                  <path id="heartpath" d="M 5.338316,0.50302766 C 0.71136983,0.50647126 -3.9576371,7.2707777 8.5004254,15.503028 23.833425,5.3700277 13.220206,-2.5384409 8.6762066,1.6475589 c -0.060791,0.054322 -0.11943,0.1110064 -0.1757812,0.1699219 -0.057,-0.059 -0.1157813,-0.116875 -0.1757812,-0.171875 C 7.4724566,0.86129334 6.4060729,0.50223298 5.338316,0.50302766 Z"
                     style="fill:var(--color-background);fill-opacity:1;stroke:#ff0000;stroke-opacity:1" />
                </svg>

          </li>
          <li class="pure-menu-item">
            <a class="github-link" href="https://github.com/dgtlmoon/changedetection.io">
              {% include "svgs/github.svg" %}
@@ -137,52 +121,14 @@
    {% endif %}
    {% if left_sticky %}
      <div class="sticky-tab" id="left-sticky">
        <a href="{{url_for('preview_page', uuid=uuid)}}">Show current snapshot</a><br>
          Visualise <strong>triggers</strong> and <strong>ignored text</strong>
        <a href="{{url_for('preview_page', uuid=uuid)}}">Show current snapshot</a>
      </div>
    {% endif %}
    {% if right_sticky %}
      <div class="sticky-tab" id="right-sticky">{{ right_sticky }}</div>
    {% endif %}
    <section class="content">
        <div id="overlay">
            <div class="content">
                <strong>changedetection.io needs your support!</strong><br>
                <p>
                    You can help us by supporting changedetection.io on these platforms;
                </p>
                <p>
                <ul>
                    <li>
                        <a href="https://alternativeto.net/software/changedetection-io/about/">Rate us at
                        AlternativeTo.net</a>
                    </li>
                <li>
                    <a href="https://github.com/dgtlmoon/changedetection.io">Star us on GitHub</a>
                </li>
                <li>
                    <a href="https://twitter.com/change_det_io">Follow us at Twitter/X</a>
                </li>
                <li>
                    <a href="https://www.linkedin.com/company/changedetection-io">Check us out on LinkedIn</a>
                </li>
                <li>
                    And tell your friends and colleagues :)
                </li>
                </ul>
                </p>
                <p>
                    The more popular changedetection.io is, the more time we can dedicate to adding amazing features!
                </p>
                <p>
                    Many thanks :)<br>
                </p>
                <p>
                    <i>changedetection.io team</i>
                </p>
            </div>
        </div>
        <header>
      <header>
        {% block header %}{% endblock %}
      </header>

@@ -13,31 +13,10 @@
<script src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script>

<div id="settings">
    <h1>Differences</h1>
    <form class="pure-form" action="" method="GET">
        <fieldset>
            {% if versions|length >= 1 %}
                <strong>Compare</strong>
                <del class="change"><span>from</span></del>
                <select id="diff-version" name="from_version" class="needs-localtime">
                    {% for version in versions|reverse %}
                        <option value="{{ version }}" {% if version == from_version %} selected="" {% endif %}>
                            {{ version }}
                        </option>
                    {% endfor %}
                </select>
                <ins class="change"><span>to</span></ins>
                <select id="current-version" name="to_version" class="needs-localtime">
                    {% for version in versions|reverse %}
                        <option value="{{ version }}" {% if version == to_version %} selected="" {% endif %}>
                            {{ version }}
                        </option>
                    {% endfor %}
                </select>
                <button type="submit" class="pure-button pure-button-primary">Go</button>
            {% endif %}
        </fieldset>
        <fieldset>
            <strong>Style</strong>

            <label for="diffWords" class="pure-checkbox">
                <input type="radio" name="diff_type" id="diffWords" value="diffWords"> Words</label>
            <label for="diffLines" class="pure-checkbox">
@@ -47,20 +26,32 @@
                <input type="radio" name="diff_type" id="diffChars" value="diffChars"> Chars</label>
            <!-- @todo - when mimetype is JSON, select this by default? -->
            <label for="diffJson" class="pure-checkbox">
                <input type="radio" name="diff_type" id="diffJson" value="diffJson"> JSON</label>
                <input type="radio" name="diff_type" id="diffJson" value="diffJson" > JSON</label>

            <span>
        <!-- https://github.com/kpdecker/jsdiff/issues/389 ? -->
        <label for="ignoreWhitespace" class="pure-checkbox" id="label-diff-ignorewhitespace">
            <input type="checkbox" id="ignoreWhitespace" name="ignoreWhitespace"> Ignore Whitespace</label>
    </span>
            {% if versions|length >= 1 %}
            <label for="diff-version">Compare newest (<span id="current-v-date"></span>) with</label>
            <select id="diff-version" name="previous_version">
                {% for version in versions|reverse %}
                <option value="{{version}}" {% if version == current_previous_version %} selected="" {% endif %}>
                    {{version}}
                </option>
                {% endfor %}
            </select>
            <button type="submit" class="pure-button pure-button-primary">Go</button>
            {% endif %}
        </fieldset>
    </form>

    <del>Removed text</del>
    <ins>Inserted Text</ins>
    <span>
        <!-- https://github.com/kpdecker/jsdiff/issues/389 ? -->
        <label for="ignoreWhitespace" class="pure-checkbox" id="label-diff-ignorewhitespace">
            <input type="checkbox" id="ignoreWhitespace" name="ignoreWhitespace" > Ignore Whitespace</label>
    </span>
</div>

<div id="diff-jump">
    <a id="jump-next-diff" title="Jump to next difference">Jump</a>
    <a onclick="next_diff();">Jump</a>
</div>

<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
@@ -88,6 +79,8 @@
    </div>

     <div class="tab-pane-inner" id="text">
         <div class="tip">Pro-tip: Use the <strong>show current snapshot</strong> tab to visualise what will be ignored; highlight text to add it to the ignore filters</div>

         {% if password_enabled_and_share_is_off %}
           <div class="tip">Pro-tip: You can enable <strong>"share access when password is enabled"</strong> from settings</div>
         {% endif %}
@@ -98,8 +91,8 @@
             <tbody>
             <tr>
                 <!-- just proof of concept copied straight from github.com/kpdecker/jsdiff -->
                 <td id="a" style="display: none;">{{from_version_file_contents}}</td>
                 <td id="b" style="display: none;">{{to_version_file_contents}}</td>
                 <td id="a" style="display: none;">{{previous}}</td>
                 <td id="b" style="display: none;">{{newest}}</td>
                 <td id="diff-col">
                     <span id="result" class="highlightable-filter"></span>
                 </td>

@@ -4,10 +4,8 @@
{% from '_common_fields.jinja' import render_common_settings_form %}
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
<script>
    const browser_steps_available_screenshots=JSON.parse('{{ watch.get_browsersteps_available_screenshots|tojson }}');

    const browser_steps_config=JSON.parse('{{ browser_steps_config|tojson }}');
    const browser_steps_fetch_screenshot_image_url="{{url_for('browser_steps.browser_steps_fetch_screenshot_image', uuid=uuid)}}";
    const browser_steps_last_error_step={{ watch.browser_steps_last_error_step|tojson }};
    const browser_steps_start_url="{{url_for('browser_steps.browsersteps_start_session', uuid=uuid)}}";
    const browser_steps_sync_url="{{url_for('browser_steps.browsersteps_ui_update', uuid=uuid)}}";
{% if emailprefix %}
@@ -51,7 +49,6 @@
            <li class="tab"><a href="#restock">Restock Detection</a></li>
            {% endif %}
            <li class="tab"><a href="#notifications">Notifications</a></li>
            <li class="tab"><a href="#stats">Stats</a></li>
        </ul>
    </div>

@@ -112,7 +109,7 @@
                        <span class="pure-form-message-inline">
                            <p>Use the <strong>Basic</strong> method (default) where your watched site doesn't need Javascript to render.</p>
                            <p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'.</p>
                            Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using Bright Data and Oxylabs Proxies, find out more here.</a>
                            Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using BrightData Proxies, find out more here.</a>
                        </span>
                    </div>
                {% if form.proxy %}
@@ -290,12 +287,11 @@ xpath://body/div/span[contains(@class, 'example-class')]",
                                {% endif %}
                            </ul>
                        </li>
                        <li>XPath - Limit text to this XPath rule, simply start with a forward-slash. To specify that XPath should be used explicitly, or where the rule starts with an XPath function: prefix with <code>xpath:</code>
                        <li>XPath - Limit text to this XPath rule, simply start with a forward-slash,
                            <ul>
                                <li>Example:  <code>//*[contains(@class, 'sametext')]</code> or <code>xpath:count(//*[contains(@class, 'sametext')])</code>, <a
                                <li>Example:  <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
                                href="http://xpather.com/" target="new">test your XPath here</a></li>
                                <li>Example: Get all titles from an RSS feed <code>//title/text()</code></li>
                                <li>To use XPath1.0: Prefix with <code>xpath1:</code></li>
                            </ul>
                            </li>
                    </ul>
@@ -445,35 +441,7 @@ Unavailable") }}
                </fieldset>
            </div>
            {% endif %}
            <div class="tab-pane-inner" id="stats">
                <div class="pure-control-group">
                    <style>
                    #stats-table tr > td:first-child {
                        font-weight: bold;
                    }
                    </style>
                    <table class="pure-table" id="stats-table">
                        <tbody>
                        <tr>
                            <td>Check count</td>
                            <td>{{ "{:,}".format(watch.check_count) }}</td>
                        </tr>
                        <tr>
                            <td>Consecutive filter failures</td>
                            <td>{{ "{:,}".format(watch.consecutive_filter_failures) }}</td>
                        </tr>
                        <tr>
                            <td>History length</td>
                            <td>{{ "{:,}".format(watch.history|length) }}</td>
                        </tr>
                        <tr>
                            <td>Last fetch time</td>
                            <td>{{ watch.fetch_time }}s</td>
                        </tr>
                        </tbody>
                    </table>
                </div>
            </div>

            <div id="actions">
                <div class="pure-control-group">
                    {{ render_button(form.save_button) }}

@@ -8,12 +8,11 @@
        <ul>
            <li class="tab" id=""><a href="#url-list">URL List</a></li>
            <li class="tab"><a href="#distill-io">Distill.io</a></li>
            <li class="tab"><a href="#xlsx">.XLSX & Wachete</a></li>
        </ul>
    </div>

    <div class="box-wrap inner">
        <form class="pure-form" action="{{url_for('import_page')}}" method="POST" enctype="multipart/form-data">
        <form class="pure-form pure-form-aligned" action="{{url_for('import_page')}}" method="POST">
            <input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
            <div class="tab-pane-inner" id="url-list">
                    <legend>
@@ -80,42 +79,6 @@
" rows="25">{{ original_distill_json }}</textarea>

            </div>
            <div class="tab-pane-inner" id="xlsx">
            <fieldset>
                <div class="pure-control-group">
                {{ render_field(form.xlsx_file, class="processor") }}
                </div>
                <div class="pure-control-group">
                    {{ render_field(form.file_mapping, class="processor") }}
                </div>
            </fieldset>
                <div class="pure-control-group">
                <span class="pure-form-message-inline">
                    Table of custom column and data type mappings for the <strong>Custom mapping</strong> file mapping type.
                </span>
                    <table style="border: 1px solid #aaa; padding: 0.5rem; border-radius: 4px;">
                        <tr>
                            <td><strong>Column #</strong></td>
                            {% for n in range(4) %}
                                <td><input type="number" name="custom_xlsx[col_{{n}}]" style="width: 4rem;" min="1"></td>
                            {% endfor %}
                        </tr>
                        <tr>
                            <td><strong>Type</strong></td>
                            {% for n in range(4) %}
                                <td><select name="custom_xlsx[col_type_{{n}}]">
                                    <option value="" style="color: #aaa"> -- none --</option>
                                    <option value="url">URL</option>
                                    <option value="title">Title</option>
                                    <option value="include_filter">CSS/xPath filter</option>
                                    <option value="tag">Group / Tag name(s)</option>
                                    <option value="interval_minutes">Recheck time (minutes)</option>
                                </select></td>
                            {% endfor %}
                        </tr>
                    </table>
                </div>
            </div>
            <button type="submit" class="pure-button pure-input-1-2 pure-button-primary">Import</button>
        </form>

@@ -109,7 +109,7 @@
                        <p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'.</p>
                    </span>
                    <br>
                    Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using Bright Data and Oxylabs Proxies, find out more here.</a>
                    Tip: <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support">Connect using BrightData Proxies, find out more here.</a>
                </div>
                <fieldset class="pure-group" id="webdriver-override-options">
                    <div class="pure-form-message-inline">
@@ -178,9 +178,6 @@ nav
                        <span style="display:none;" id="api-key-copy">copy</span>
                    </div>
                </div>
                <div class="pure-control-group">
                    <a href="{{url_for('settings_reset_api_key')}}" class="pure-button button-small button-cancel">Regenerate API key</a>
                </div>
            </div>
            <div class="tab-pane-inner" id="proxies">
                <div id="recommended-proxy">
@@ -230,15 +227,11 @@ nav
                </p>
               <p><strong>Tip</strong>: "Residential" and "Mobile" proxy types can be more successful than "Data Center" for blocked websites.

                <div class="pure-control-group" id="extra-proxies-setting">
                <div class="pure-control-group">
                {{ render_field(form.requests.form.extra_proxies) }}
                <span class="pure-form-message-inline">"Name" will be used for selecting the proxy in the Watch Edit settings</span><br>
                <span class="pure-form-message-inline">"Name" will be used for selecting the proxy in the Watch Edit settings</span>
                <span class="pure-form-message-inline">SOCKS5 proxies with authentication are only supported with the 'plain requests' fetcher; for other fetchers you should whitelist the IP access instead</span>
                </div>
                <div class="pure-control-group" id="extra-browsers-setting">
                    <span class="pure-form-message-inline"><i>Extra Browsers</i> allow changedetection.io to communicate with a different web-browser.</span><br>
                  {{ render_field(form.requests.form.extra_browsers) }}
                </div>
            </div>
            <div id="actions">
                <div class="pure-control-group">

@@ -1,6 +1,3 @@
<svg class="octicon octicon-mark-github v-align-middle" viewbox="0 0 16 16" version="1.1" aria-hidden="true">
    <path
     fill-rule="evenodd"
     d="M 8,0 C 3.58,0 0,3.58 0,8 c 0,3.54 2.29,6.53 5.47,7.59 0.4,0.07 0.55,-0.17 0.55,-0.38 0,-0.19 -0.01,-0.82 -0.01,-1.49 C 4,14.09 3.48,13.23 3.32,12.78 3.23,12.55 2.84,11.84 2.5,11.65 2.22,11.5 1.82,11.13 2.49,11.12 3.12,11.11 3.57,11.7 3.72,11.94 4.44,13.15 5.59,12.81 6.05,12.6 6.12,12.08 6.33,11.73 6.56,11.53 4.78,11.33 2.92,10.64 2.92,7.58 2.92,6.71 3.23,5.99 3.74,5.43 3.66,5.23 3.38,4.41 3.82,3.31 c 0,0 0.67,-0.21 2.2,0.82 0.64,-0.18 1.32,-0.27 2,-0.27 0.68,0 1.36,0.09 2,0.27 1.53,-1.04 2.2,-0.82 2.2,-0.82 0.44,1.1 0.16,1.92 0.08,2.12 0.51,0.56 0.82,1.27 0.82,2.15 0,3.07 -1.87,3.75 -3.65,3.95 0.29,0.25 0.54,0.73 0.54,1.48 0,1.07 -0.01,1.93 -0.01,2.2 0,0.21 0.15,0.46 0.55,0.38 A 8.013,8.013 0 0 0 16,8 C 16,3.58 12.42,0 8,0 Z"
     id="path2" />
<svg class="octicon octicon-mark-github v-align-middle" height="32" viewbox="0 0 16 16" version="1.1" width="32" aria-hidden="true">
  <path fill-rule="evenodd" d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z"></path>
</svg>

Before Width: | Height: | Size: 917 B  After Width: | Height: | Size: 749 B
@@ -82,15 +82,12 @@
            </tr>
            {% endif %}
            {% for watch in (watches|sort(attribute=sort_attribute, reverse=sort_order == 'asc'))|pagination_slice(skip=pagination.skip) %}

                {% set is_unviewed = watch.newest_history_key|int > watch.last_viewed and watch.history_n >= 2 %}

            <tr id="{{ watch.uuid }}"
                class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }} processor-{{ watch['processor'] }}
                {% if watch.last_error is defined and watch.last_error != False %}error{% endif %}
                {% if watch.last_notification_error is defined and watch.last_notification_error != False %}error{% endif %}
                {% if watch.paused is defined and watch.paused != False %}paused{% endif %}
                {% if is_unviewed %}unviewed{% endif %}
                {% if watch.newest_history_key|int > watch.last_viewed and watch.history_n >= 2 %}unviewed{% endif %}
                {% if watch.uuid in queued_uuids %}queued{% endif %}">
                <td class="inline checkbox-uuid"><input name="uuids" type="checkbox" value="{{ watch.uuid }}"> <span>{{ loop.index + pagination.skip }}</span></td>
                <td class="inline watch-controls">
@@ -107,9 +104,8 @@

                    {% if watch.get_fetch_backend == "html_webdriver"
                         or ( watch.get_fetch_backend == "system" and system_default_fetcher == 'html_webdriver' )
                         or "extra_browser_" in watch.get_fetch_backend
                    %}
                    <img class="status-icon" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" title="Using a Chrome browser" >
                    <img class="status-icon" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" title="Using a chrome browser" >
                    {% endif %}

                    {% if watch.is_pdf %}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" title="Converting PDF to text" >{% endif %}
@@ -158,8 +154,8 @@
                    {% endfor %}

                </td>
                <td class="last-checked" data-timestamp="{{ watch.last_checked }}">{{watch|format_last_checked_time|safe}}</td>
                <td class="last-changed" data-timestamp="{{ watch.last_changed }}">{% if watch.history_n >= 2 and watch.last_changed > 0 %}
                <td class="last-checked">{{watch|format_last_checked_time|safe}}</td>
                <td class="last-changed">{% if watch.history_n >= 2 and watch.last_changed > 0 %}
                    {{watch.last_changed|format_timestamp_timeago}}
                    {% else %}
                    Not yet
@@ -170,13 +166,7 @@
                       class="recheck pure-button pure-button-primary">{% if watch.uuid in queued_uuids %}Queued{% else %}Recheck{% endif %}</a>
                    <a href="{{ url_for('edit_page', uuid=watch.uuid)}}" class="pure-button pure-button-primary">Edit</a>
                    {% if watch.history_n >= 2 %}

                        {% if is_unviewed %}
                           <a href="{{ url_for('diff_history_page', uuid=watch.uuid, from_version=watch.get_next_snapshot_key_to_last_viewed) }}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a>
                        {% else %}
                           <a href="{{ url_for('diff_history_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a>
                        {% endif %}

                    <a href="{{ url_for('diff_history_page', uuid=watch.uuid) }}" target="{{watch.uuid}}" class="pure-button pure-button-primary diff-link">Diff</a>
                    {% else %}
                        {% if watch.history_n == 1 or (watch.history_n == 0 and watch.error_text_ctime) %}
                            <a href="{{ url_for('preview_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button pure-button-primary">Preview</a>
@@ -188,18 +178,13 @@
            </tbody>
        </table>
        <ul id="post-list-buttons">
            {% if errored_count %}
            <li>
                <a href="{{url_for('index', with_errors=1, tag=request.args.get('tag')) }}" class="pure-button button-tag button-error ">With errors ({{ errored_count }})</a>
            </li>
            {% endif %}
            {% if has_unviewed %}
            <li>
                <a href="{{url_for('mark_all_viewed', with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Mark all viewed</a>
                <a href="{{url_for('mark_all_viewed', tag=request.args.get('tag')) }}" class="pure-button button-tag ">Mark all viewed</a>
            </li>
            {% endif %}
            <li>
               <a href="{{ url_for('form_watch_checknow', tag=active_tag, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Recheck
               <a href="{{ url_for('form_watch_checknow', tag=active_tag) }}" class="pure-button button-tag ">Recheck
                all {% if active_tag %} in "{{tags[active_tag].title}}"{% endif %}</a>
            </li>
            <li>

@@ -1 +0,0 @@
# placeholder
@@ -1,89 +0,0 @@
#!/usr/bin/python3
import os

from flask import url_for
from ..util import live_server_setup, wait_for_all_checks

def do_test(client, live_server, make_test_use_extra_browser=False):

    # Grep for this string in the logs?
    test_url = f"https://changedetection.io/ci-test.html"
    custom_browser_name = 'custom browser URL'

    # needs to be set and something like 'ws://127.0.0.1:3000?stealth=1&--disable-web-security=true'
    assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"

    #####################
    res = client.post(
        url_for("settings_page"),
        data={"application-empty_pages_are_a_change": "",
              "requests-time_between_check-minutes": 180,
              'application-fetch_backend': "html_webdriver",
              # browserless-custom-url is set up in .github/workflows/test-only.yml
              # the test script run_custom_browser_url_test.sh will look for 'custom-browser-search-string' in the container logs
              'requests-extra_browsers-0-browser_connection_url': 'ws://browserless-custom-url:3000?stealth=1&--disable-web-security=true&custom-browser-search-string=1',
              'requests-extra_browsers-0-browser_name': custom_browser_name
              },
        follow_redirects=True
    )

    assert b"Settings updated." in res.data

    # Add our URL to the import page
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )

    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    if make_test_use_extra_browser:

        # So the name should appear in the edit page under "Request" > "Fetch Method"
        res = client.get(
            url_for("edit_page", uuid="first"),
            follow_redirects=True
        )
        assert b'custom browser URL' in res.data

        res = client.post(
            url_for("edit_page", uuid="first"),
            data={
                  "url": test_url,
                  "tags": "",
                  "headers": "",
                  'fetch_backend': f"extra_browser_{custom_browser_name}",
                  'webdriver_js_execute_code': ''
            },
            follow_redirects=True
        )

        assert b"Updated watch." in res.data
        wait_for_all_checks(client)

    # Force recheck
    res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
    assert b'1 watches queued for rechecking.' in res.data

    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )
    assert b'cool it works' in res.data


# Requires playwright to be installed
def test_request_via_custom_browser_url(client, live_server):
    live_server_setup(live_server)
    # We do this so we can grep the logs of the custom container and see if the request actually went through that container
    do_test(client, live_server, make_test_use_extra_browser=True)


def test_request_not_via_custom_browser_url(client, live_server):
    live_server_setup(live_server)
    # We do this so we can grep the logs of the custom container and see if the request actually went through that container
    do_test(client, live_server, make_test_use_extra_browser=False)

Binary file not shown.

Binary file not shown.
@@ -1,4 +1,4 @@
from .util import live_server_setup, extract_UUID_from_client, wait_for_all_checks
from . util import live_server_setup, extract_UUID_from_client
from flask import url_for
import time

@@ -19,16 +19,10 @@ def test_check_access_control(app, client, live_server):
        )

        assert b"1 Imported" in res.data
        time.sleep(3)
        # causes a 'Popped wrong request context.' error when client. is accessed?
        #wait_for_all_checks(client)

        res = c.get(url_for("form_watch_checknow"), follow_redirects=True)
        time.sleep(2)
        res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
        assert b'1 watches queued for rechecking.' in res.data
        time.sleep(3)
        # causes a 'Popped wrong request context.' error when client. is accessed?
        #wait_for_all_checks(client)

        time.sleep(2)

        # Enable password check and diff page access bypass
        res = c.post(
@@ -48,7 +42,7 @@ def test_check_access_control(app, client, live_server):
        assert b"Login" in res.data

        # The diff page should return something valid when logged out
        res = c.get(url_for("diff_history_page", uuid="first"))
        res = client.get(url_for("diff_history_page", uuid="first"))
        assert b'Random content' in res.data

        # Check wrong password does not let us in
@@ -89,8 +83,6 @@ def test_check_access_control(app, client, live_server):
        res = c.get(url_for("logout"),
            follow_redirects=True)

        assert b"Login" in res.data

        res = c.get(url_for("settings_page"),
            follow_redirects=True)

@@ -168,5 +160,5 @@ def test_check_access_control(app, client, live_server):
        assert b"Login" in res.data

        # The diff page should return something valid when logged out
        res = c.get(url_for("diff_history_page", uuid="first"))
        res = client.get(url_for("diff_history_page", uuid="first"))
        assert b'Random content' not in res.data

@@ -89,7 +89,7 @@ def test_check_basic_change_detection_functionality(client, live_server):

    # Following the 'diff' link, it should no longer display as 'unviewed' even after we recheck it a few times
    res = client.get(url_for("diff_history_page", uuid="first"))
    assert b'selected=""' in res.data, "Confirm diff history page loaded"
    assert b'Compare newest' in res.data

    # Check the [preview] pulls the right one
    res = client.get(

@@ -24,7 +24,7 @@ def test_check_extract_text_from_diff(client, live_server):
    )

    assert b"1 Imported" in res.data
    wait_for_all_checks(client)
    time.sleep(1)

    # Load in 5 different numbers/changes
    last_date = ""

@@ -202,32 +202,3 @@ def test_check_filter_and_regex_extract(client, live_server):

    # Should not be here
    assert b'Some text that did change' not in res.data


def test_regex_error_handling(client, live_server):

    #live_server_setup(live_server)

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    ### test regex error handling
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"extract_text": '/something bad\d{3/XYZ',
              "url": test_url,
              "fetch_backend": "html_requests"},
        follow_redirects=True
    )

    assert b'is not a valid regular expression.' in res.data

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

@@ -33,6 +33,8 @@ def test_strip_regex_text_func():
        "/not"
    ]

    fetcher = fetch_site_status.perform_site_check(datastore=False)
    stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines)

    assert b"but 1 lines" in stripped_content

@@ -24,6 +24,7 @@ def test_strip_text_func():

    ignore_lines = ["sometimes"]

    fetcher = fetch_site_status.perform_site_check(datastore=False)
    stripped_content = html_tools.strip_ignore_text(test_content, ignore_lines)

    assert b"sometimes" not in stripped_content

@@ -1,19 +1,16 @@
#!/usr/bin/python3
import io
import os

import time

from flask import url_for

from .util import live_server_setup, wait_for_all_checks


from .util import live_server_setup
def test_setup(client, live_server):
    live_server_setup(live_server)

def test_import(client, live_server):
    # Give the endpoint time to spin up
    wait_for_all_checks(client)
    time.sleep(1)

    res = client.post(
        url_for("import_page"),
@@ -122,97 +119,3 @@ def test_import_distillio(client, live_server):
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    # Clear flask alerts
    res = client.get(url_for("index"))

def test_import_custom_xlsx(client, live_server):
    """Test can upload an Excel spreadsheet and the watches are created correctly"""

    #live_server_setup(live_server)

    dirname = os.path.dirname(__file__)
    filename = os.path.join(dirname, 'import/spreadsheet.xlsx')
    with open(filename, 'rb') as f:

        data = {
            'file_mapping': 'custom',
            'custom_xlsx[col_0]': '1',
            'custom_xlsx[col_1]': '3',
            'custom_xlsx[col_2]': '5',
            'custom_xlsx[col_3]': '4',
            'custom_xlsx[col_type_0]': 'title',
            'custom_xlsx[col_type_1]': 'url',
            'custom_xlsx[col_type_2]': 'include_filters',
            'custom_xlsx[col_type_3]': 'interval_minutes',
            'xlsx_file': (io.BytesIO(f.read()), 'spreadsheet.xlsx')
        }

    res = client.post(
        url_for("import_page"),
        data=data,
        follow_redirects=True,
    )

    assert b'4 imported from custom .xlsx' in res.data
    # Because this row was actually just a header with no usable URL, we should get an error
    assert b'Error processing row number 1' in res.data

    res = client.get(
        url_for("index")
    )

    assert b'Somesite results ABC' in res.data
    assert b'City news results' in res.data

    # Just find one to check over
    for uuid, watch in live_server.app.config['DATASTORE'].data['watching'].items():
        if watch.get('title') == 'Somesite results ABC':
            filters = watch.get('include_filters')
            assert filters[0] == '/html[1]/body[1]/div[4]/div[1]/div[1]/div[1]||//*[@id=\'content\']/div[3]/div[1]/div[1]||//*[@id=\'content\']/div[1]'
            assert watch.get('time_between_check') == {'weeks': 0, 'days': 1, 'hours': 6, 'minutes': 24, 'seconds': 0}

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

def test_import_watchete_xlsx(client, live_server):
    """Test can upload an Excel spreadsheet and the watches are created correctly"""

    #live_server_setup(live_server)
    dirname = os.path.dirname(__file__)
    filename = os.path.join(dirname, 'import/spreadsheet.xlsx')
    with open(filename, 'rb') as f:

        data = {
            'file_mapping': 'wachete',
            'xlsx_file': (io.BytesIO(f.read()), 'spreadsheet.xlsx')
        }

    res = client.post(
        url_for("import_page"),
        data=data,
        follow_redirects=True,
    )

    assert b'4 imported from Wachete .xlsx' in res.data

    res = client.get(
        url_for("index")
    )

    assert b'Somesite results ABC' in res.data
    assert b'City news results' in res.data

    # Just find one to check over
    for uuid, watch in live_server.app.config['DATASTORE'].data['watching'].items():
        if watch.get('title') == 'Somesite results ABC':
            filters = watch.get('include_filters')
            assert filters[0] == '/html[1]/body[1]/div[4]/div[1]/div[1]/div[1]||//*[@id=\'content\']/div[3]/div[1]/div[1]||//*[@id=\'content\']/div[1]'
            assert watch.get('time_between_check') == {'weeks': 0, 'days': 1, 'hours': 6, 'minutes': 24, 'seconds': 0}
            assert watch.get('fetch_backend') == 'html_requests' # Has inactive 'dynamic wachet'

        if watch.get('title') == 'JS website':
            assert watch.get('fetch_backend') == 'html_webdriver' # Has active 'dynamic wachet'

        if watch.get('title') == 'system default website':
            assert watch.get('fetch_backend') == 'system' # uses default if blank

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

@@ -2,8 +2,9 @@

import time
from flask import url_for
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks
from .util import set_original_response, set_modified_response, live_server_setup

sleep_time_for_fetch_thread = 3

# `subtractive_selectors` should still work in `source:` type requests
def test_fetch_pdf(client, live_server):
@@ -21,9 +22,7 @@ def test_fetch_pdf(client, live_server):

    assert b"1 Imported" in res.data

    wait_for_all_checks(client)

    time.sleep(sleep_time_for_fetch_thread)
    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
@@ -34,42 +33,8 @@ def test_fetch_pdf(client, live_server):

    # So we know if the file changes in other ways
    import hashlib
    original_md5 = hashlib.md5(open("test-datastore/endpoint-test.pdf", 'rb').read()).hexdigest().upper()
    md5 = hashlib.md5(open("test-datastore/endpoint-test.pdf", 'rb').read()).hexdigest().upper()
    # We should have one
    assert len(original_md5) > 0
    assert len(md5) > 0
    # And it's going to be in the document
    assert b'Document checksum - '+bytes(str(original_md5).encode('utf-8')) in res.data

    shutil.copy("tests/test2.pdf", "test-datastore/endpoint-test.pdf")
    changed_md5 = hashlib.md5(open("test-datastore/endpoint-test.pdf", 'rb').read()).hexdigest().upper()
    res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
    assert b'1 watches queued for rechecking.' in res.data

    wait_for_all_checks(client)

    # Now something should be ready, indicated by having a 'unviewed' class
    res = client.get(url_for("index"))
    assert b'unviewed' in res.data

    # The original checksum should not be here anymore (cdio adds it to the bottom of the text)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert original_md5.encode('utf-8') not in res.data
    assert changed_md5.encode('utf-8') in res.data

    res = client.get(
        url_for("diff_history_page", uuid="first"),
        follow_redirects=True
    )

    assert original_md5.encode('utf-8') in res.data
    assert changed_md5.encode('utf-8') in res.data

    assert b'here is a change' in res.data

    assert b'Document checksum - '+bytes(str(md5).encode('utf-8')) in res.data
@@ -80,11 +80,8 @@ def test_headers_in_request(client, live_server):

    # Should be only one with headers set
    assert watches_with_headers==1
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

def test_body_in_request(client, live_server):

    # Add our URL to the import page
    test_url = url_for('test_body', _external=True)
    if os.getenv('PLAYWRIGHT_DRIVER_URL'):
@@ -173,8 +170,7 @@ def test_body_in_request(client, live_server):
        follow_redirects=True
    )
    assert b"Body must be empty when Request Method is set to GET" in res.data
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


def test_method_in_request(client, live_server):
    # Add our URL to the import page

@@ -2,61 +2,12 @@

import time
from flask import url_for
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, \
    extract_UUID_from_client
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI


def set_original_cdata_xml():
    test_return_data = """<rss xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:media="http://search.yahoo.com/mrss/" xmlns:atom="http://www.w3.org/2005/Atom" version="2.0">
    <channel>
    <title>Gizi</title>
    <link>https://test.com</link>
    <atom:link href="https://testsite.com" rel="self" type="application/rss+xml"/>
    <description>
    <![CDATA[ The Future Could Be Here ]]>
    </description>
    <language>en</language>
    <item>
    <title>
    <![CDATA[ <img src="https://testsite.com/hacked.jpg"> Hackers can access your computer ]]>
    </title>
    <link>https://testsite.com/news/12341234234</link>
    <description>
    <![CDATA[ <img class="type:primaryImage" src="https://testsite.com/701c981da04869e.jpg"/><p>The days of Terminator and The Matrix could be closer. But be positive.</p><p><a href="https://testsite.com">Read more link...</a></p> ]]>
    </description>
    <category>cybernetics</category>
    <category>rand corporation</category>
    <pubDate>Tue, 17 Oct 2023 15:10:00 GMT</pubDate>
    <guid isPermaLink="false">1850933241</guid>
    <dc:creator>
    <![CDATA[ Mr Hacker News ]]>
    </dc:creator>
    <media:thumbnail url="https://testsite.com/thumbnail-c224e10d81488e818701c981da04869e.jpg"/>
    </item>

    <item>
        <title>    Some other title    </title>
        <link>https://testsite.com/news/12341234236</link>
        <description>
        Some other description
        </description>
    </item>
    </channel>
    </rss>
            """

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(test_return_data)


def test_setup(client, live_server):
    live_server_setup(live_server)

def test_rss_and_token(client, live_server):
    #    live_server_setup(live_server)

    set_original_response()
    rss_token = extract_rss_token_from_UI(client)
    live_server_setup(live_server)

    # Add our URL to the import page
    res = client.post(
@@ -66,11 +17,11 @@ def test_rss_and_token(client, live_server):
    )

    assert b"1 Imported" in res.data
    rss_token = extract_rss_token_from_UI(client)

    wait_for_all_checks(client)
    set_modified_response()
    time.sleep(2)
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)
    time.sleep(2)

    # Add our URL to the import page
    res = client.get(
@@ -86,80 +37,3 @@ def test_rss_and_token(client, live_server):
    )
    assert b"Access denied, bad token" not in res.data
    assert b"Random content" in res.data

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)

def test_basic_cdata_rss_markup(client, live_server):
    #live_server_setup(live_server)

    set_original_cdata_xml()

    test_url = url_for('test_endpoint', content_type="application/xml", _external=True)

    # Add our URL to the import page
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )

    assert b"1 Imported" in res.data

    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )
    assert b'CDATA' not in res.data
    assert b'<![' not in res.data
    assert b'Hackers can access your computer' in res.data
    assert b'The days of Terminator' in res.data
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)

def test_rss_xpath_filtering(client, live_server):
    #live_server_setup(live_server)

    set_original_cdata_xml()

    test_url = url_for('test_endpoint', content_type="application/xml", _external=True)

    res = client.post(
        url_for("form_quick_watch_add"),
        data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
        follow_redirects=True
    )
    assert b"Watch added in Paused state, saving will unpause" in res.data

    uuid = extract_UUID_from_client(client)
    res = client.post(
        url_for("edit_page", uuid=uuid, unpause_on_save=1),
        data={
                "include_filters": "//item/title",
                "fetch_backend": "html_requests",
                "headers": "",
                "proxy": "no-proxy",
                "tags": "",
                "url": test_url,
              },
        follow_redirects=True
    )
    assert b"unpaused" in res.data

    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )
    assert b'CDATA' not in res.data
    assert b'<![' not in res.data
    # #1874  All but the first <title was getting selected
    # Convert any HTML with just a top level <title> to <h1> to be sure title renders

    assert b'Hackers can access your computer' in res.data # Should ONLY be selected by the xpath
    assert b'Some other title' in res.data  # Should ONLY be selected by the xpath
    assert b'The days of Terminator' not in res.data # Should NOT be selected by the xpath
    assert b'Some other description' not in res.data  # Should NOT be selected by the xpath

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)

@@ -1,5 +1,5 @@
from flask import url_for
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks
from . util import set_original_response, set_modified_response, live_server_setup
import time


@@ -12,7 +12,6 @@ def test_bad_access(client, live_server):
    )

    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    # Attempt to add a body with a GET method
    res = client.post(
@@ -60,7 +59,7 @@ def test_bad_access(client, live_server):
        data={"url": 'file:///tasty/disk/drive', "tags": ''},
        follow_redirects=True
    )
    wait_for_all_checks(client)
    time.sleep(1)
    res = client.get(url_for("index"))

    assert b'file:// type access is denied for security reasons.' in res.data
@@ -2,15 +2,13 @@

import time
from flask import url_for
from .util import live_server_setup, wait_for_all_checks
from . util import live_server_setup

from ..html_tools import *


def test_setup(live_server):
    live_server_setup(live_server)


def set_original_response():
    test_return_data = """<html>
       <body>
@@ -28,7 +26,6 @@ def set_original_response():
        f.write(test_return_data)
    return None


def set_modified_response():
    test_return_data = """<html>
       <body>
@@ -47,12 +44,11 @@ def set_modified_response():

    return None


# Handle utf-8 charset replies https://github.com/dgtlmoon/changedetection.io/pull/613
def test_check_xpath_filter_utf8(client, live_server):
    filter = '//item/*[self::description]'
    filter='//item/*[self::description]'

    d = '''<?xml version="1.0" encoding="UTF-8"?>
    d='''<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
	<channel>
		<title>rpilocator.com</title>
@@ -90,14 +86,14 @@ def test_check_xpath_filter_utf8(client, live_server):
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)
    time.sleep(1)
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    wait_for_all_checks(client)
    time.sleep(3)
    res = client.get(url_for("index"))
    assert b'Unicode strings with encoding declaration are not supported.' not in res.data
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
@@ -106,9 +102,9 @@ def test_check_xpath_filter_utf8(client, live_server):

# Handle utf-8 charset replies https://github.com/dgtlmoon/changedetection.io/pull/613
def test_check_xpath_text_function_utf8(client, live_server):
    filter = '//item/title/text()'
    filter='//item/title/text()'

    d = '''<?xml version="1.0" encoding="UTF-8"?>
    d='''<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
	<channel>
		<title>rpilocator.com</title>
@@ -144,14 +140,14 @@ def test_check_xpath_text_function_utf8(client, live_server):
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)
    time.sleep(1)
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": filter, "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    wait_for_all_checks(client)
    time.sleep(3)
    res = client.get(url_for("index"))
    assert b'Unicode strings with encoding declaration are not supported.' not in res.data

@@ -167,12 +163,16 @@ def test_check_xpath_text_function_utf8(client, live_server):
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


def test_check_markup_xpath_filter_restriction(client, live_server):
    sleep_time_for_fetch_thread = 3

    xpath_filter = "//*[contains(@class, 'sametext')]"

    set_original_response()

    # Give the endpoint time to spin up
    time.sleep(1)

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
@@ -183,7 +183,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server):
    assert b"1 Imported" in res.data

    # Give the thread time to pick it up
    wait_for_all_checks(client)
    time.sleep(sleep_time_for_fetch_thread)

    # Goto the edit page, add our ignore text
    # Add our URL to the import page
@@ -195,7 +195,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server):
    assert b"Updated watch." in res.data

    # Give the thread time to pick it up
    wait_for_all_checks(client)
    time.sleep(sleep_time_for_fetch_thread)

    # view it/reset state back to viewed
    client.get(url_for("diff_history_page", uuid="first"), follow_redirects=True)
@@ -206,7 +206,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server):
    # Trigger a check
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    # Give the thread time to pick it up
    wait_for_all_checks(client)
    time.sleep(sleep_time_for_fetch_thread)

    res = client.get(url_for("index"))
    assert b'unviewed' not in res.data
@@ -215,6 +215,10 @@ def test_check_markup_xpath_filter_restriction(client, live_server):


def test_xpath_validation(client, live_server):

    # Give the endpoint time to spin up
    time.sleep(1)

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
@@ -223,7 +227,7 @@ def test_xpath_validation(client, live_server):
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)
    time.sleep(2)

    res = client.post(
        url_for("edit_page", uuid="first"),
@@ -235,172 +239,14 @@ def test_xpath_validation(client, live_server):
    assert b'Deleted' in res.data
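

# The "xpath:" prefix routes a filter through the XPath 2.0/3.1 engine, while the
# "xpath1:" prefix (exercised further below) forces classic XPath 1.0 — both
# engines are expected to reject a malformed expression at validation time.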
def test_xpath23_prefix_validation(client, live_server):
    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "xpath:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"is not a valid XPath expression" in res.data
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


def test_xpath1_validation(client, live_server):
    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "xpath1:/something horrible", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"is not a valid XPath expression" in res.data
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


# actually only really used by the distill.io importer, but could be handy too
def test_check_with_prefix_include_filters(client, live_server):
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

    set_original_response()
    wait_for_all_checks(client)
    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)
    # Give the endpoint time to spin up
    time.sleep(1)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tags": "", "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Updated watch." in res.data
    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"Some text thats the same" in res.data  # in selector
    assert b"Some text that will change" not in res.data  # not in selector

    client.get(url_for("form_delete", uuid="all"), follow_redirects=True)


def test_various_rules(client, live_server):
    # Just check these don't error
    # live_server_setup(live_server)
    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write("""<html>
       <body>
     Some initial text<br>
     <p>Which is across multiple lines</p>
     <br>
     So let's see what happens.  <br>
     <div class="sametext">Some text thats the same</div>
     <div class="changetext">Some text that will change</div>
     <a href=''>some linky </a>
     <a href=''>another some linky </a>
     <!-- related to https://github.com/dgtlmoon/changedetection.io/pull/1774 -->
     <input   type="email"   id="email" />
     </body>
     </html>
    """)

    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    for r in ['//div', '//a', 'xpath://div', 'xpath://a']:
        res = client.post(
            url_for("edit_page", uuid="first"),
            data={"include_filters": r,
                  "url": test_url,
                  "tags": "",
                  "headers": "",
                  'fetch_backend': "html_requests"},
            follow_redirects=True
        )
        wait_for_all_checks(client)
        assert b"Updated watch." in res.data
        res = client.get(url_for("index"))
        assert b'fetch-error' not in res.data, f"Should not see errors after '{r}' filter"

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


def test_xpath_20(client, live_server):
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    set_original_response()

    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "//*[contains(@class, 'sametext')]|//*[contains(@class, 'changetext')]",
              "url": test_url,
              "tags": "",
              "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Updated watch." in res.data
    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"Some text thats the same" in res.data  # in selector
    assert b"Some text that will change" in res.data  # in selector

    client.get(url_for("form_delete", uuid="all"), follow_redirects=True)


def test_xpath_20_function_count(client, live_server):
    set_original_response()

    # Add our URL to the import page
@@ -411,100 +257,23 @@ def test_xpath_20_function_count(client, live_server):
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)
    time.sleep(3)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "xpath:count(//div) * 123456789987654321",
              "url": test_url,
              "tags": "",
              "headers": "",
              'fetch_backend': "html_requests"},
        data={"include_filters":  "xpath://*[contains(@class, 'sametext')]", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Updated watch." in res.data
    wait_for_all_checks(client)
    time.sleep(3)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"246913579975308642" in res.data  # in selector
    assert b"Some text thats the same" in res.data #in selector
    assert b"Some text that will change" not in res.data #not in selector

    client.get(url_for("form_delete", uuid="all"), follow_redirects=True)


def test_xpath_20_function_count2(client, live_server):
    set_original_response()

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "/html/body/count(div) * 123456789987654321",
              "url": test_url,
              "tags": "",
              "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Updated watch." in res.data
    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"246913579975308642" in res.data  # in selector

    client.get(url_for("form_delete", uuid="all"), follow_redirects=True)


def test_xpath_20_function_string_join_matches(client, live_server):
    set_original_response()

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={
            "include_filters": "xpath:string-join(//*[contains(@class, 'sametext')]|//*[matches(@class, 'changetext')], 'specialconjunction')",
            "url": test_url,
            "tags": "",
            "headers": "",
            'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Updated watch." in res.data
    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"Some text thats the samespecialconjunctionSome text that will change" in res.data  # in selector

    client.get(url_for("form_delete", uuid="all"), follow_redirects=True)

@@ -1,203 +0,0 @@
import sys
import os
import pytest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import html_tools

# test generation guide.
# 1. Do not include encoding in the xml declaration if the test object is a str type.
# 2. Always paraphrase test.
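#
# Rule 1 exists because lxml (which html_tools relies on) refuses str input that
# carries an encoding declaration — a minimal sketch of the failure mode, assuming
# lxml is installed:
#
#   from lxml import etree
#   doc = '<?xml version="1.0" encoding="UTF-8"?><rss/>'
#   etree.fromstring(doc)                  # ValueError: Unicode strings with encoding
#                                          # declaration are not supported
#   etree.fromstring(doc.encode('utf-8'))  # bytes input parses fine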

hotels = """
<hotel>
  <branch location="California">
    <staff>
      <given_name>Christopher</given_name>
      <surname>Anderson</surname>
      <age>25</age>
    </staff>
    <staff>
      <given_name>Christopher</given_name>
      <surname>Carter</surname>
      <age>30</age>
    </staff>
  </branch>
  <branch location="Las Vegas">
    <staff>
      <given_name>Lisa</given_name>
      <surname>Walker</surname>
      <age>60</age>
    </staff>
    <staff>
      <given_name>Jessica</given_name>
      <surname>Walker</surname>
      <age>32</age>
    </staff>
    <staff>
      <given_name>Jennifer</given_name>
      <surname>Roberts</surname>
      <age>50</age>
    </staff>
  </branch>
</hotel>"""

@pytest.mark.parametrize("html_content", [hotels])
@pytest.mark.parametrize("xpath, answer", [('(//staff/given_name, //staff/age)', '25'),
                          ("xs:date('2023-10-10')", '2023-10-10'),
                          ("if (/hotel/branch[@location = 'California']/staff[1]/age = 25) then 'is 25' else 'is not 25'", 'is 25'),
                          ("if (//hotel/branch[@location = 'California']/staff[1]/age = 25) then 'is 25' else 'is not 25'", 'is 25'),
                          ("if (count(/hotel/branch/staff) = 5) then true() else false()", 'true'),
                          ("if (count(//hotel/branch/staff) = 5) then true() else false()", 'true'),
                          ("for $i in /hotel/branch/staff return if ($i/age >= 40) then upper-case($i/surname) else lower-case($i/surname)", 'anderson'),
                          ("given_name  =  'Christopher' and age  =  40", 'false'),
                          ("//given_name  =  'Christopher' and //age  =  40", 'false'),
                          #("(staff/given_name, staff/age)", 'Lisa'),
                          ("(//staff/given_name, //staff/age)", 'Lisa'),
                          #("hotel/branch[@location = 'California']/staff/age union hotel/branch[@location = 'Las Vegas']/staff/age", ''),
                          ("(//hotel/branch[@location = 'California']/staff/age union //hotel/branch[@location = 'Las Vegas']/staff/age)", '60'),
                          ("(200 to 210)", "205"),
                          ("(//hotel/branch[@location = 'California']/staff/age union //hotel/branch[@location = 'Las Vegas']/staff/age)", "50"),
                          ("(1, 9, 9, 5)", "5"),
                          ("(3, (), (14, 15), 92, 653)", "653"),
                          ("for $i in /hotel/branch/staff return $i/given_name", "Christopher"),
                          ("for $i in //hotel/branch/staff return $i/given_name", "Christopher"),
                          ("distinct-values(for $i in /hotel/branch/staff return $i/given_name)", "Jessica"),
                          ("distinct-values(for $i in //hotel/branch/staff return $i/given_name)", "Jessica"),
                          ("for $i in (7 to  15) return $i*10", "130"),
                          ("some $i in /hotel/branch/staff satisfies $i/age < 20", "false"),
                          ("some $i in //hotel/branch/staff satisfies $i/age < 20", "false"),
                          ("every $i in /hotel/branch/staff satisfies $i/age > 20", "true"),
                          ("every $i in //hotel/branch/staff satisfies $i/age > 20 ", "true"),
                          ("let $x := branch[@location = 'California'], $y := branch[@location = 'Las Vegas'] return (avg($x/staff/age), avg($y/staff/age))", "27.5"),
                          ("let $x := //branch[@location = 'California'], $y := //branch[@location = 'Las Vegas'] return (avg($x/staff/age), avg($y/staff/age))", "27.5"),
                          ("let $nu := 1, $de := 1000 return  'probability = ' || $nu div $de * 100 || '%'", "0.1%"),
                          ("let $nu := 2, $probability := function ($argument) { 'probability = ' ||  $nu div $argument  * 100 || '%'}, $de := 5 return $probability($de)", "40%"),
                          ("'XPATH2.0-3.1 dissemination' instance of xs:string ", "true"),
                          ("'new stackoverflow question incoming' instance of xs:integer ", "false"),
                          ("'50000' cast as xs:integer", "50000"),
                          ("//branch[@location = 'California']/staff[1]/surname eq 'Anderson'", "true"),
                          ("fn:false()", "false")])
def test_hotels(html_content, xpath, answer):
    html_content = html_tools.xpath_filter(xpath, html_content, append_pretty_line_formatting=True)
    assert type(html_content) == str
    assert answer in html_content


branches_to_visit = """<?xml version="1.0" ?>
  <branches_to_visit>
     <manager name="Godot" room_no="501">
         <branch>Area 51</branch>
         <branch>A place with no name</branch>
         <branch>Stalsk12</branch>
     </manager>
      <manager name="Freya" room_no="305">
         <branch>Stalsk12</branch>
         <branch>Barcelona</branch>
         <branch>Paris</branch>
     </manager>
 </branches_to_visit>"""
@pytest.mark.parametrize("html_content", [branches_to_visit])
@pytest.mark.parametrize("xpath, answer", [
    ("manager[@name = 'Godot']/branch union manager[@name = 'Freya']/branch", "Area 51"),
    ("//manager[@name = 'Godot']/branch union //manager[@name = 'Freya']/branch", "Stalsk12"),
    ("manager[@name = 'Godot']/branch | manager[@name = 'Freya']/branch", "Stalsk12"),
    ("//manager[@name = 'Godot']/branch | //manager[@name = 'Freya']/branch", "Stalsk12"),
    ("manager/branch intersect manager[@name = 'Godot']/branch", "A place with no name"),
    ("//manager/branch intersect //manager[@name = 'Godot']/branch", "A place with no name"),
    ("manager[@name = 'Godot']/branch intersect manager[@name = 'Freya']/branch", ""),
    ("manager/branch except manager[@name = 'Godot']/branch", "Barcelona"),
    ("manager[@name = 'Godot']/branch[1]  eq 'Area 51'", "true"),
    ("//manager[@name = 'Godot']/branch[1]  eq 'Area 51'", "true"),
    ("manager[@name = 'Godot']/branch[1]  eq 'Seoul'", "false"),
    ("//manager[@name = 'Godot']/branch[1]  eq 'Seoul'", "false"),
    ("manager[@name = 'Godot']/branch[2] eq manager[@name = 'Freya']/branch[2]", "false"),
    ("//manager[@name = 'Godot']/branch[2] eq //manager[@name = 'Freya']/branch[2]", "false"),
    ("manager[1]/@room_no lt manager[2]/@room_no", "false"),
    ("//manager[1]/@room_no lt //manager[2]/@room_no", "false"),
    ("manager[1]/@room_no gt manager[2]/@room_no", "true"),
    ("//manager[1]/@room_no gt //manager[2]/@room_no", "true"),
    ("manager[@name = 'Godot']/branch[1]  = 'Area 51'", "true"),
    ("//manager[@name = 'Godot']/branch[1]  = 'Area 51'", "true"),
    ("manager[@name = 'Godot']/branch[1]  = 'Seoul'", "false"),
    ("//manager[@name = 'Godot']/branch[1]  = 'Seoul'", "false"),
    ("manager[@name = 'Godot']/branch  = 'Area 51'", "true"),
    ("//manager[@name = 'Godot']/branch  = 'Area 51'", "true"),
    ("manager[@name = 'Godot']/branch  = 'Barcelona'", "false"),
    ("//manager[@name = 'Godot']/branch  = 'Barcelona'", "false"),
    ("manager[1]/@room_no > manager[2]/@room_no", "true"),
    ("//manager[1]/@room_no > //manager[2]/@room_no", "true"),
    ("manager[@name = 'Godot']/branch[ . = 'Stalsk12'] is manager[1]/branch[1]", "false"),
    ("//manager[@name = 'Godot']/branch[ . = 'Stalsk12'] is //manager[1]/branch[1]", "false"),
    ("manager[@name = 'Godot']/branch[ . = 'Stalsk12'] is manager[1]/branch[3]", "true"),
    ("//manager[@name = 'Godot']/branch[ . = 'Stalsk12'] is //manager[1]/branch[3]", "true"),
    ("manager[@name = 'Godot']/branch[ . = 'Stalsk12'] <<  manager[1]/branch[1]", "false"),
    ("//manager[@name = 'Godot']/branch[ . = 'Stalsk12'] <<  //manager[1]/branch[1]", "false"),
    ("manager[@name = 'Godot']/branch[ . = 'Stalsk12']  >>  manager[1]/branch[1]", "true"),
    ("//manager[@name = 'Godot']/branch[ . = 'Stalsk12'] >>  //manager[1]/branch[1]", "true"),
    ("manager[@name = 'Godot']/branch[ . = 'Stalsk12'] is manager[@name = 'Freya']/branch[ . = 'Stalsk12']", "false"),
    ("//manager[@name = 'Godot']/branch[ . = 'Stalsk12'] is //manager[@name = 'Freya']/branch[ . = 'Stalsk12']", "false"),
    ("manager[1]/@name || manager[2]/@name", "GodotFreya"),
    ("//manager[1]/@name || //manager[2]/@name", "GodotFreya"),
                          ])
def test_branches_to_visit(html_content, xpath, answer):
    html_content = html_tools.xpath_filter(xpath, html_content, append_pretty_line_formatting=True)
    assert type(html_content) == str
    assert answer in html_content

trips = """
<trips>
   <trip reservation_number="10">
       <depart>2023-10-06</depart>
       <arrive>2023-10-10</arrive>
       <traveler name="Christopher Anderson">
           <duration>4</duration>
           <price>2000.00</price>
       </traveler>
   </trip>
   <trip reservation_number="12">
       <depart>2023-10-06</depart>
       <arrive>2023-10-12</arrive>
       <traveler name="Frank Carter">
           <duration>6</duration>
           <price>3500.34</price>
       </traveler>
   </trip>
</trips>"""
@pytest.mark.parametrize("html_content", [trips])
@pytest.mark.parametrize("xpath, answer", [
    ("1 + 9 * 9 + 5 div 5", "83"),
    ("(1 + 9 * 9 + 5) div 6", "14.5"),
    ("23 idiv 3", "7"),
    ("23 div 3", "7.66666666"),
    ("for $i in ./trip return $i/traveler/duration * $i/traveler/price", "21002.04"),
    ("for $i in ./trip return $i/traveler/duration ", "4"),
    ("for $i in .//trip return $i/traveler/duration * $i/traveler/price", "21002.04"),
    ("sum(for $i in ./trip return $i/traveler/duration * $i/traveler/price)", "29002.04"),
    ("sum(for $i in .//trip return $i/traveler/duration * $i/traveler/price)", "29002.04"),
    #("trip[1]/depart - trip[1]/arrive", "fail_to_get_answer"),
    #("//trip[1]/depart - //trip[1]/arrive", "fail_to_get_answer"),
    #("trip[1]/depart + trip[1]/arrive", "fail_to_get_answer"),
    #("xs:date(trip[1]/depart) + xs:date(trip[1]/arrive)", "fail_to_get_answer"),
    ("(//trip[1]/arrive cast as xs:date) - (//trip[1]/depart cast as xs:date)", "P4D"),
    ("(//trip[1]/depart cast as xs:date) - (//trip[1]/arrive cast as xs:date)", "-P4D"),
    ("(//trip[1]/depart cast as xs:date) + xs:dayTimeDuration('P3D')", "2023-10-09"),
    ("(//trip[1]/depart cast as xs:date) - xs:dayTimeDuration('P3D')", "2023-10-03"),
    ("(456, 623) instance of xs:integer", "false"),
    ("(456, 623) instance of xs:integer*", "true"),
    ("/trips/trip instance of element()", "false"),
    ("/trips/trip instance of element()*", "true"),
    ("/trips/trip[1]/arrive instance of xs:date", "false"),
    ("date(/trips/trip[1]/arrive) instance of xs:date", "true"),
    ("'8' cast as xs:integer", "8"),
    ("'11.1E3' cast as xs:double", "11100"),
    ("6.5 cast as xs:integer", "6"),
    #("/trips/trip[1]/arrive cast as xs:dateTime", "fail_to_get_answer"),
    ("/trips/trip[1]/arrive cast as xs:date", "2023-10-10"),
    ("('2023-10-12') cast as xs:date", "2023-10-12"),
    ("for $i in //trip return concat($i/depart, '  ', $i/arrive)", "2023-10-06  2023-10-10"),
                          ])
def test_trips(html_content, xpath, answer):
    html_content = html_tools.xpath_filter(xpath, html_content, append_pretty_line_formatting=True)
    assert type(html_content) == str
    assert answer in html_content
@@ -1,54 +0,0 @@
#!/usr/bin/python3

# run from dir above changedetectionio/ dir
# python3 -m unittest changedetectionio.tests.unit.test_notification_diff

import unittest
import os

from changedetectionio.model import Watch

# mostly
class TestDiffBuilder(unittest.TestCase):

    def test_watch_get_suggested_from_diff_timestamp(self):
        import uuid as uuid_builder
        watch = Watch.model(datastore_path='/tmp', default={})
        watch.ensure_data_dir_exists()

        watch['last_viewed'] = 110

        watch.save_history_text(contents=b"hello world", timestamp=100, snapshot_id=str(uuid_builder.uuid4()))
        watch.save_history_text(contents=b"hello world", timestamp=105, snapshot_id=str(uuid_builder.uuid4()))
        watch.save_history_text(contents=b"hello world", timestamp=109, snapshot_id=str(uuid_builder.uuid4()))
        watch.save_history_text(contents=b"hello world", timestamp=112, snapshot_id=str(uuid_builder.uuid4()))
        watch.save_history_text(contents=b"hello world", timestamp=115, snapshot_id=str(uuid_builder.uuid4()))
        watch.save_history_text(contents=b"hello world", timestamp=117, snapshot_id=str(uuid_builder.uuid4()))

        p = watch.get_next_snapshot_key_to_last_viewed
        assert p == "112", "Correct last-viewed timestamp was detected"

        # When there is only one step of difference from the end of the list, it should return the second-last change
        watch['last_viewed'] = 116
        p = watch.get_next_snapshot_key_to_last_viewed
        assert p == "115", "Correct 'second last' last-viewed timestamp was detected when using the last timestamp"

        watch['last_viewed'] = 99
        p = watch.get_next_snapshot_key_to_last_viewed
        assert p == "100"

        watch['last_viewed'] = 200
        p = watch.get_next_snapshot_key_to_last_viewed
        assert p == "115", "When the 'last viewed' timestamp is greater than the newest snapshot, return second last"

        watch['last_viewed'] = 109
        p = watch.get_next_snapshot_key_to_last_viewed
        assert p == "109", "Correct when it's the same time"

        # new empty one
        watch = Watch.model(datastore_path='/tmp', default={})
        p = watch.get_next_snapshot_key_to_last_viewed
        assert p == None, "None when no history available"

if __name__ == '__main__':
    unittest.main()
@@ -1,19 +1,18 @@
#!/usr/bin/python3

import time
import os
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client

def test_setup(client, live_server):
    live_server_setup(live_server)

# Add a site in paused mode, add an invalid filter, we should still have visual selector data ready
def test_visual_selector_content_ready(client, live_server):
    import os
    import json

    assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"
    time.sleep(1)
    live_server_setup(live_server)


    # Add our URL to the import page, because the docker container (playwright/selenium) won't be able to connect to our usual test url
    test_url = "https://changedetection.io/ci-test/test-runjs.html"
@@ -54,13 +53,6 @@ def test_visual_selector_content_ready(client, live_server):
    with open(os.path.join('test-datastore', uuid, 'elements.json'), 'r') as f:
        json.load(f)

    # Attempt to fetch it via the web hook that the browser would use
    res = client.get(url_for('static_content', group='visual_selector_data', filename=uuid))
    json.loads(res.data)
    assert res.mimetype == 'application/json'
    assert res.status_code == 200


    # Some options should be enabled
    # @todo - in the future, the visibility should be toggled by JS from the request type setting
    res = client.get(
@@ -68,75 +60,4 @@ def test_visual_selector_content_ready(client, live_server):
        follow_redirects=True
    )
    assert b'notification_screenshot' in res.data
    client.get(
        url_for("form_delete", uuid="all"),
        follow_redirects=True
    )

def test_basic_browserstep(client, live_server):

    assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"
    #live_server_setup(live_server)

    # Add our URL to the import page, because the docker container (playwright/selenium) won't be able to connect to our usual test url
    test_url = "https://changedetection.io/ci-test/test-runjs.html"

    res = client.post(
        url_for("form_quick_watch_add"),
        data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
        follow_redirects=True
    )
    assert b"Watch added in Paused state, saving will unpause" in res.data

    res = client.post(
        url_for("edit_page", uuid="first", unpause_on_save=1),
        data={
              "url": test_url,
              "tags": "",
              "headers": "",
              'fetch_backend': "html_webdriver",
              'browser_steps-0-operation': 'Goto site',
              'browser_steps-1-operation': 'Click element',
              'browser_steps-1-selector': 'button[name=test-button]',
              'browser_steps-1-optional_value': ''
        },
        follow_redirects=True
    )
    assert b"unpaused" in res.data
    wait_for_all_checks(client)

    uuid = extract_UUID_from_client(client)

    # Check HTML conversion was detected and worked
    res = client.get(
        url_for("preview_page", uuid=uuid),
        follow_redirects=True
    )
    assert b"This text should be removed" not in res.data
    assert b"I smell JavaScript because the button was pressed" in res.data

    # now test for 404 errors
    res = client.post(
        url_for("edit_page", uuid=uuid, unpause_on_save=1),
        data={
              "url": "https://changedetection.io/404",
              "tags": "",
              "headers": "",
              'fetch_backend': "html_webdriver",
              'browser_steps-0-operation': 'Goto site',
              'browser_steps-1-operation': 'Click element',
              'browser_steps-1-selector': 'button[name=test-button]',
              'browser_steps-1-optional_value': ''
        },
        follow_redirects=True
    )
    assert b"unpaused" in res.data
    wait_for_all_checks(client)

    res = client.get(url_for("index"))
    assert b'Error - 404' in res.data

    client.get(
        url_for("form_delete", uuid="all"),
        follow_redirects=True
    )
@@ -209,7 +209,6 @@ class update_worker(threading.Thread):
        from .processors import text_json_diff, restock_diff

        while not self.app.config.exit.is_set():
            update_handler = None

            try:
                queued_item_data = self.q.get(block=False)
@@ -230,36 +229,16 @@ class update_worker(threading.Thread):
                    now = time.time()

                    try:
                        # Processor is what we are using for detecting the "Change"
                        processor = self.datastore.data['watching'][uuid].get('processor', 'text_json_diff')
                        # if system...

                        # Abort processing when the content was the same as the last fetch
                        skip_when_same_checksum = queued_item_data.item.get('skip_when_checksum_same')

                        processor = self.datastore.data['watching'][uuid].get('processor','text_json_diff')

                        # @todo some way to switch by name
                        # Init a new 'difference_detection_processor'

                        if processor == 'restock_diff':
                            update_handler = restock_diff.perform_site_check(datastore=self.datastore,
                                                                             watch_uuid=uuid
                                                                             )
                            update_handler = restock_diff.perform_site_check(datastore=self.datastore)
                        else:
                            # Used as a default and also by some tests
                            update_handler = text_json_diff.perform_site_check(datastore=self.datastore,
                                                                               watch_uuid=uuid
                                                                               )

                        # Clear last errors (move to preflight func?)
                        self.datastore.data['watching'][uuid]['browser_steps_last_error_step'] = None

                        update_handler.call_browser()

                        changed_detected, update_obj, contents = update_handler.run_changedetection(uuid,
                                                                                    skip_when_checksum_same=skip_when_same_checksum,
                                                                                    )
                            update_handler = text_json_diff.perform_site_check(datastore=self.datastore)

                        changed_detected, update_obj, contents = update_handler.run(uuid, skip_when_checksum_same=queued_item_data.item.get('skip_when_checksum_same'))
                        # Re #342
                        # In Python 3, all strings are sequences of Unicode characters. There is a bytes type that holds raw bytes.
                        # We then convert/.decode('utf-8') for the notification etc
 | 
			
		||||
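                        # Illustrative sketch (not part of this diff): normalising the fetched
                        # content to text for the notification step might look like:
                        #     text = contents.decode('utf-8') if isinstance(contents, bytes) else contents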
@@ -345,13 +324,8 @@ class update_worker(threading.Thread):
                        if not self.datastore.data['watching'].get(uuid):
                            continue

                        error_step = e.step_n + 1
                        err_text = f"Warning, browser step at position {error_step} could not run, target not found, check the watch, add a delay if necessary, view Browser Steps to see screenshot at that step"
                        self.datastore.update_watch(uuid=uuid,
                                                    update_obj={'last_error': err_text,
                                                                'browser_steps_last_error_step': error_step
                                                                }
                                                    )
                        err_text = "Warning, browser step at position {} could not run, target not found, check the watch, add a delay if necessary.".format(e.step_n+1)
                        self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text})


                        if self.datastore.data['watching'][uuid].get('filter_failure_notification_send', False):
@@ -410,9 +384,6 @@ class update_worker(threading.Thread):
                        self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
                        # Other serious error
                        process_changedetection_results = False
#                        import traceback
#                        print(traceback.format_exc())

                    else:
                        # Crash protection, the watch entry could have been removed by this point (during a slow chrome fetch etc)
                        if not self.datastore.data['watching'].get(uuid):

@@ -66,12 +66,25 @@ services:
#        browser-chrome:
#            condition: service_started

#    browser-chrome:
#        hostname: browser-chrome
#        image: selenium/standalone-chrome-debug:3.141.59
#        environment:
#            - VNC_NO_PASSWORD=1
#            - SCREEN_WIDTH=1920
#            - SCREEN_HEIGHT=1080
#            - SCREEN_DEPTH=24
#        volumes:
#            # Workaround to avoid the browser crashing inside a docker container
#            # See https://github.com/SeleniumHQ/docker-selenium#quick-start
#            - /dev/shm:/dev/shm
#        restart: unless-stopped

     # Used for fetching pages via Playwright+Chrome where you need Javascript support.
     # Note: Playwright/browserless not supported on ARM type devices (rPi etc)
     # RECOMMENDED FOR FETCHING PAGES WITH CHROME

#    playwright-chrome:
#        hostname: playwright-chrome
#        image: browserless/chrome:1.60-chrome-stable
#        image: browserless/chrome
#        restart: unless-stopped
#        environment:
#            - SCREEN_WIDTH=1920
@@ -88,23 +101,6 @@ services:
#             Ignore HTTPS errors, like for self-signed certs
#            - DEFAULT_IGNORE_HTTPS_ERRORS=true
#
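#        Sketch (assumption, matching the service name above): the changedetection
#        service points at this container through its PLAYWRIGHT_DRIVER_URL
#        environment setting, e.g.
#            - PLAYWRIGHT_DRIVER_URL=ws://playwright-chrome:3000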

     # Used for fetching pages via WebDriver+Chrome where you need Javascript support.
     # Note: works well but is deprecated, doesn't fetch full-page screenshots and has other issues
#    browser-chrome:
#        hostname: browser-chrome
#        image: selenium/standalone-chrome:4
#        environment:
#            - VNC_NO_PASSWORD=1
#            - SCREEN_WIDTH=1920
#            - SCREEN_HEIGHT=1080
#            - SCREEN_DEPTH=24
#        volumes:
#            # Workaround to avoid the browser crashing inside a docker container
#            # See https://github.com/SeleniumHQ/docker-selenium#quick-start
#            - /dev/shm:/dev/shm
#        restart: unless-stopped

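#        Sketch (assumption, matching the service name above): the changedetection
#        service would reach this container through its WEBDRIVER_URL setting, e.g.
#            - WEBDRIVER_URL=http://browser-chrome:4444/wd/hub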
volumes:
  changedetection-data:

@@ -1,13 +1,12 @@
eventlet>=0.33.3 # related to dnspython fixes
eventlet>=0.31.0
feedgen~=0.9
flask-compress
# 0.6.3 included compatibility fix for werkzeug 3.x (2.x had deprecation of url handlers)
flask-login>=0.6.3
flask-login~=0.5
flask-paginate
flask_expects_json~=1.7
flask_restful
flask_wtf~=1.2
flask~=2.3
flask_wtf
flask~=2.0
inscriptis~=2.2
pytz
timeago~=1.0
@@ -25,12 +24,16 @@ chardet>2.3.0
wtforms~=3.0
jsonpath-ng~=1.5.3

dnspython~=2.4 # related to eventlet fixes

# dnspython 2.3.0 is not compatible with eventlet
# * https://github.com/eventlet/eventlet/issues/781
# * https://datastax-oss.atlassian.net/browse/PYTHON-1320
dnspython<2.3.0

# jq not available on Windows so must be installed manually

# Notification library
apprise~=1.6.0
apprise~=1.5.0

# apprise mqtt https://github.com/dgtlmoon/changedetection.io/issues/315
paho-mqtt
@@ -46,21 +49,21 @@ beautifulsoup4
# XPath filtering, lxml is required by bs4 anyway, but put it here to be safe.
lxml

# XPath 2.0-3.1 support
elementpath
# 3.141 was missing socksVersion, 3.150 was not in pypi, so we try 4.1.0
selenium~=4.1.0

selenium~=4.14.0

werkzeug~=3.0
# https://stackoverflow.com/questions/71652965/importerror-cannot-import-name-safe-str-cmp-from-werkzeug-security/71653849#71653849
# ImportError: cannot import name 'safe_str_cmp' from 'werkzeug.security'
# need to revisit flask login versions
werkzeug~=2.0.0

# Templating, so far just in the URLs but in the future can be for the notifications also
jinja2~=3.1
jinja2-time
openpyxl

# https://peps.python.org/pep-0508/#environment-markers
# https://github.com/dgtlmoon/changedetection.io/pull/1009
jq~=1.3; python_version >= "3.8" and sys_platform == "darwin"
jq~=1.3; python_version >= "3.8" and sys_platform == "linux"
jq~=1.3 ;python_version >= "3.8" and sys_platform == "linux"
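# The "; ..." suffixes above are PEP 508 environment markers, which make a
# dependency conditional on the target interpreter and platform. An
# illustrative (hypothetical) example restricting a package to Windows on
# Python 3.8+:
#     somepkg~=1.0; python_version >= "3.8" and sys_platform == "win32"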

# Any current modern version, required so far for screenshot PNG->JPEG conversion but will be used more in the future
pillow