Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-01 15:17:20 +00:00)

Compare commits between puppeteer-... and lock-mqtt-... (48 commits)
| Author | SHA1 | Date |
|---|---|---|
| | d1d3362188 | |
| | 1ba29655f5 | |
| | 830a0a3a82 | |
| | e110b3ee93 | |
| | 3ae9bfa6f9 | |
| | 6f3c3b7dfb | |
| | 74707909f1 | |
| | d4dac23ba1 | |
| | f9954f93f3 | |
| | 1a43b112dc | |
| | db59bf73e1 | |
| | 8aac7bccbe | |
| | 9449c59fbb | |
| | 21f4ba2208 | |
| | daef1cd036 | |
| | 56b365df40 | |
| | 8e5bf91965 | |
| | 1ae59551be | |
| | a176468fb8 | |
| | 8fac593201 | |
| | e3b8c0f5af | |
| | 514fd7f91e | |
| | 38c4768b92 | |
| | 6555d99044 | |
| | e719dbd19b | |
| | b28a8316cc | |
| | e609a2d048 | |
| | 994d34c776 | |
| | de776800e9 | |
| | 8b8ed58f20 | |
| | 79c6d765de | |
| | c6db7fc90e | |
| | bc587efae2 | |
| | 6ee6be1a5f | |
| | c83485094b | |
| | 387ce32e6f | |
| | 6b9a788d75 | |
| | 14e632bc19 | |
| | 52c895b2e8 | |
| | a62043e086 | |
| | 3d390b6ea4 | |
| | 301a40ca34 | |
| | 1c099cdba6 | |
| | af747e6e3f | |
| | aefad0bdf6 | |
| | 904ef84f82 | |
| | d2569ba715 | |
| | ccb42bcb12 | |
.github/dependabot.yml (vendored, 4 changed lines)

@@ -4,6 +4,10 @@ updates:
directory: /
schedule:
interval: "weekly"
"caronc/apprise":
versioning-strategy: "increase"
schedule:
interval: "daily"
groups:
all:
patterns:
.github/workflows/test-only.yml (vendored, 13 changed lines)

@@ -72,7 +72,11 @@ jobs:
run: |
# Playwright via Sockpuppetbrowser fetch
# tests/visualselector/test_fetch_data.py will do browser steps
docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py'
docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/fetchers/test_content.py'
docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_errorhandling.py'
docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/visualselector/test_fetch_data.py'
docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/fetchers/test_custom_js_before_content.py'

- name: Playwright and SocketPuppetBrowser - Headers and requests
run: |

@@ -87,8 +91,11 @@ jobs:
# STRAIGHT TO CDP
- name: Pyppeteer and SocketPuppetBrowser - Specific tests in built container
run: |
# Playwright via Sockpuppetbrowser fetch
docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" -e "FAST_PUPPETEER_CHROME_FETCHER=True" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py'
# Playwright via Sockpuppetbrowser fetch
docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "FAST_PUPPETEER_CHROME_FETCHER=True" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/fetchers/test_content.py'
docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "FAST_PUPPETEER_CHROME_FETCHER=True" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_errorhandling.py'
docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "FAST_PUPPETEER_CHROME_FETCHER=True" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/visualselector/test_fetch_data.py'
docker run --rm -e "FLASK_SERVER_NAME=cdio" -e "FAST_PUPPETEER_CHROME_FETCHER=True" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000" --network changedet-network --hostname=cdio test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/fetchers/test_custom_js_before_content.py'

- name: Pyppeteer and SocketPuppetBrowser - Headers and requests checks
run: |
@@ -2,7 +2,7 @@ Contributing is always welcome!

I am no professional flask developer, if you know a better way that something can be done, please let me know!

Otherwise, it's always best to PR into the `dev` branch.
Otherwise, it's always best to PR into the `master` branch.

Please be sure that all new functionality has a matching test!
@@ -1,8 +1,8 @@
recursive-include changedetectionio/api *
recursive-include changedetectionio/blueprint *
recursive-include changedetectionio/content_fetchers *
recursive-include changedetectionio/model *
recursive-include changedetectionio/processors *
recursive-include changedetectionio/res *
recursive-include changedetectionio/static *
recursive-include changedetectionio/templates *
recursive-include changedetectionio/tests *
@@ -91,6 +91,14 @@ We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) glob

Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/

### We have a Chrome extension!

Easily add the current web page to your changedetection.io tool, simply install the extension and click "Sync" to connect it to your existing changedetection.io install.

[<img src="./docs/chrome-extension-screenshot.png" style="max-width:80%;" alt="Chrome Extension to easily add the current web-page to detect a change." title="Chrome Extension to easily add the current web-page to detect a change." />](https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop)

[Goto the Chrome Webstore to download the extension.](https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop)

## Installation

### Docker
@@ -2,9 +2,9 @@

# Read more https://github.com/dgtlmoon/changedetection.io/wiki

__version__ = '0.45.14'
__version__ = '0.45.20'

from distutils.util import strtobool
from changedetectionio.strtobool import strtobool
from json.decoder import JSONDecodeError
import os
#os.environ['EVENTLET_NO_GREENDNS'] = 'yes'
@@ -1,5 +1,5 @@
import os
from distutils.util import strtobool
from changedetectionio.strtobool import strtobool

from flask_expects_json import expects_json
from changedetectionio import queuedWatchMetaData
@@ -12,7 +12,7 @@
#
#

from distutils.util import strtobool
from changedetectionio.strtobool import strtobool
from flask import Blueprint, request, make_response
import os
@@ -6,6 +6,8 @@ import re
from random import randint
from loguru import logger

from changedetectionio.content_fetchers.base import manage_user_agent

# Two flags, tell the JS which of the "Selector" or "Value" field should be enabled in the front end
# 0- off, 1- on
browser_step_ui_config = {'Choose one': '0 0',

@@ -178,6 +180,7 @@ class browsersteps_live_ui(steppable_browser_interface):
stale = False
# bump and kill this if idle after X sec
age_start = 0
headers = {}

# use a special driver, maybe locally etc
command_executor = os.getenv(

@@ -192,7 +195,8 @@ class browsersteps_live_ui(steppable_browser_interface):

browser_type = os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').strip('"')

def __init__(self, playwright_browser, proxy=None):
def __init__(self, playwright_browser, proxy=None, headers=None):
self.headers = headers or {}
self.age_start = time.time()
self.playwright_browser = playwright_browser
if self.context is None:

@@ -206,16 +210,17 @@ class browsersteps_live_ui(steppable_browser_interface):

# @todo handle multiple contexts, bind a unique id from the browser on each req?
self.context = self.playwright_browser.new_context(
# @todo
# user_agent=request_headers['User-Agent'] if request_headers.get('User-Agent') else 'Mozilla/5.0',
# proxy=self.proxy,
# This is needed to enable JavaScript execution on GitHub and others
bypass_csp=True,
# Should never be needed
accept_downloads=False,
proxy=proxy
accept_downloads=False,  # Should never be needed
bypass_csp=True,  # This is needed to enable JavaScript execution on GitHub and others
extra_http_headers=self.headers,
ignore_https_errors=True,
proxy=proxy,
service_workers=os.getenv('PLAYWRIGHT_SERVICE_WORKERS', 'allow'),
# Should be `allow` or `block` - sites like YouTube can transmit large amounts of data via Service Workers
user_agent=manage_user_agent(headers=self.headers),
)

self.page = self.context.new_page()

# self.page.set_default_navigation_timeout(keep_open)
@@ -1,5 +1,4 @@
from playwright.sync_api import PlaywrightContextManager
import asyncio

# So playwright wants to run as a context manager, but we do something horrible and hacky
# we are holding the session open for as long as possible, then shutting it down, and opening a new one
@@ -1,5 +1,5 @@

from distutils.util import strtobool
from changedetectionio.strtobool import strtobool
from flask import Blueprint, flash, redirect, url_for
from flask_login import login_required
from changedetectionio.store import ChangeDetectionStore

@@ -11,9 +11,16 @@ def construct_blueprint(datastore: ChangeDetectionStore):
def tags_overview_page():
from .form import SingleTag
add_form = SingleTag(request.form)
sorted_tags = sorted(datastore.data['settings']['application'].get('tags').items(), key=lambda x: x[1]['title'])

from collections import Counter

tag_count = Counter(tag for watch in datastore.data['watching'].values() if watch.get('tags') for tag in watch['tags'])

output = render_template("groups-overview.html",
available_tags=sorted_tags,
form=add_form,
available_tags=datastore.data['settings']['application'].get('tags', {}),
tag_count=tag_count
)

return output
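A note on the `tag_count` added above: `collections.Counter` totals how many watches reference each tag UUID, and returns 0 for tags that no watch uses. A minimal sketch of the same idiom, using invented watch data:

```python
from collections import Counter

# Hypothetical watches, each holding a list of tag UUIDs (the same shape the blueprint iterates over)
watching = {
    'watch-1': {'tags': ['tag-a', 'tag-b']},
    'watch-2': {'tags': ['tag-a']},
    'watch-3': {'tags': []},
}

# Count every tag occurrence across all watches, skipping watches without tags
tag_count = Counter(tag for watch in watching.values() if watch.get('tags') for tag in watch['tags'])

print(tag_count['tag-a'])  # 2
print(tag_count['tag-c'])  # 0, Counter returns 0 for unseen keys
```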
@@ -3,7 +3,7 @@
{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
{% from '_common_fields.jinja' import render_common_settings_form %}
<script>
const notification_base_url="{{url_for('ajax_callback_send_notification_test', watch_uuid=uuid)}}";
const notification_base_url="{{url_for('ajax_callback_send_notification_test', mode="group-settings")}}";
</script>

<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
@@ -27,6 +27,7 @@
<thead>
<tr>
<th></th>
<th># Watches</th>
<th>Tag / Label name</th>
<th></th>
</tr>

@@ -40,12 +41,13 @@
<td colspan="3">No website organisational tags/groups configured</td>
</tr>
{% endif %}
{% for uuid, tag in available_tags.items() %}
{% for uuid, tag in available_tags %}
<tr id="{{ uuid }}" class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }}">
<td class="watch-controls">
<a class="link-mute state-{{'on' if tag.notification_muted else 'off'}}" href="{{url_for('tags.mute', uuid=tag.uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications" class="icon icon-mute" ></a>
</td>
<td class="title-col inline">{{tag.title}}</td>
<td>{{ "{:,}".format(tag_count[uuid]) if uuid in tag_count else 0 }}</td>
<td class="title-col inline"> <a href="{{url_for('index', tag=uuid) }}">{{ tag.title }}</a></td>
<td>
<a class="pure-button pure-button-primary" href="{{ url_for('tags.form_tag_edit', uuid=uuid) }}">Edit</a>
<a class="pure-button pure-button-primary" href="{{ url_for('tags.delete', uuid=uuid) }}" title="Deletes and removes tag">Delete</a>
@@ -1,10 +1,10 @@
import sys
from distutils.util import strtobool

from changedetectionio.strtobool import strtobool
from loguru import logger
from changedetectionio.content_fetchers.exceptions import BrowserStepsStepException
import os

visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4, header, footer, section, article, aside, details, main, nav, section, summary'
visualselector_xpath_selectors = 'div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4,header,footer,section,article,aside,details,main,nav,section,summary'

# available_fetchers() will scan this implementation looking for anything starting with html_
# this information is used in the form selections

@@ -29,10 +29,15 @@ def available_fetchers():
# rather than site-specific.
use_playwright_as_chrome_fetcher = os.getenv('PLAYWRIGHT_DRIVER_URL', False)
if use_playwright_as_chrome_fetcher:
# @note - For now, browser steps always uses playwright
if not strtobool(os.getenv('FAST_PUPPETEER_CHROME_FETCHER', 'False')):
logger.debug('Using Playwright library as fetcher')
from .playwright import fetcher as html_webdriver
else:
logger.debug('Using direct Python Puppeteer library as fetcher')
from .puppeteer import fetcher as html_webdriver

else:
logger.debug("Falling back to selenium as fetcher")
from .webdriver_selenium import fetcher as html_webdriver
@@ -5,6 +5,40 @@ from loguru import logger
from changedetectionio.content_fetchers import BrowserStepsStepException


def manage_user_agent(headers, current_ua=''):
"""
Basic setting of user-agent

NOTE!!!!!! The service that does the actual Chrome fetching should handle any anti-robot techniques
THERE ARE MANY WAYS THAT IT CAN BE DETECTED AS A ROBOT!!
This does not take care of
- Scraping of 'navigator' (platform, productSub, vendor, oscpu etc etc) browser object (navigator.appVersion) etc
- TCP/IP fingerprint JA3 etc
- Graphic rendering fingerprinting
- Your IP being obviously in a pool of bad actors
- Too many requests
- Scraping of SCH-UA browser replies (thanks google!!)
- Scraping of ServiceWorker, new window calls etc

See https://filipvitas.medium.com/how-to-set-user-agent-header-with-puppeteer-js-and-not-fail-28c7a02165da
Puppeteer requests https://github.com/dgtlmoon/pyppeteerstealth

:param page:
:param headers:
:return:
"""
# Ask it what the user agent is, if its obviously ChromeHeadless, switch it to the default
ua_in_custom_headers = next((v for k, v in headers.items() if k.lower() == "user-agent"), None)
if ua_in_custom_headers:
return ua_in_custom_headers

if not ua_in_custom_headers and current_ua:
current_ua = current_ua.replace('HeadlessChrome', 'Chrome')
return current_ua

return None


class Fetcher():
browser_connection_is_custom = None
browser_connection_url = None
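A short illustration of how the new helper behaves, using made-up header and user-agent strings. A custom `User-Agent` header always wins (regardless of key casing); otherwise an obviously headless UA reported by the browser is rewritten; with nothing to work from it returns None:

```python
from changedetectionio.content_fetchers.base import manage_user_agent

# Custom header takes priority, key casing does not matter
print(manage_user_agent(headers={'User-AGENT': 'my-custom-agent/1.0'}))
# -> 'my-custom-agent/1.0'

# No custom header: the UA the browser reports has 'HeadlessChrome' swapped for 'Chrome'
print(manage_user_agent(headers={}, current_ua='Mozilla/5.0 ... HeadlessChrome/120.0 Safari/537.36'))
# -> 'Mozilla/5.0 ... Chrome/120.0 Safari/537.36'

# Nothing usable supplied
print(manage_user_agent(headers={}))  # -> None
```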
@@ -36,6 +36,13 @@ class BrowserConnectError(Exception):
logger.error(f"Browser connection error {msg}")
return

class BrowserFetchTimedOut(Exception):
msg = ''
def __init__(self, msg):
self.msg = msg
logger.error(f"Browser processing took too long - {msg}")
return

class BrowserStepsStepException(Exception):
def __init__(self, step_n, original_e):
self.step_n = step_n
@@ -3,7 +3,8 @@ import os
from urllib.parse import urlparse

from loguru import logger
from changedetectionio.content_fetchers.base import Fetcher

from changedetectionio.content_fetchers.base import Fetcher, manage_user_agent
from changedetectionio.content_fetchers.exceptions import PageUnloadable, Non200ErrorCodeReceived, EmptyReply, ScreenshotUnavailable

class fetcher(Fetcher):

@@ -102,19 +103,16 @@ class fetcher(Fetcher):
# Set user agent to prevent Cloudflare from blocking the browser
# Use the default one configured in the App.py model that's passed from fetch_site_status.py
context = browser.new_context(
user_agent={k.lower(): v for k, v in request_headers.items()}.get('user-agent', None),
accept_downloads=False,  # Should never be needed
bypass_csp=True,  # This is needed to enable JavaScript execution on GitHub and others
extra_http_headers=request_headers,
ignore_https_errors=True,
proxy=self.proxy,
# This is needed to enable JavaScript execution on GitHub and others
bypass_csp=True,
# Should be `allow` or `block` - sites like YouTube can transmit large amounts of data via Service Workers
service_workers=os.getenv('PLAYWRIGHT_SERVICE_WORKERS', 'allow'),
# Should never be needed
accept_downloads=False
service_workers=os.getenv('PLAYWRIGHT_SERVICE_WORKERS', 'allow'),  # Should be `allow` or `block` - sites like YouTube can transmit large amounts of data via Service Workers
user_agent=manage_user_agent(headers=request_headers),
)

self.page = context.new_page()
if len(request_headers):
context.set_extra_http_headers(request_headers)

# Listen for all console events and handle errors
self.page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))
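For reference, a standalone sketch of creating a Playwright context with roughly the same options the updated fetcher passes. The connection endpoint and header values are placeholders, and connecting over CDP is only an assumed way of obtaining a browser here, not necessarily how the project wires it up:

```python
import os
from playwright.sync_api import sync_playwright

request_headers = {'User-Agent': 'example-agent/1.0'}  # placeholder headers

with sync_playwright() as p:
    # Placeholder endpoint, the real fetcher reads its driver URL from configuration
    browser = p.chromium.connect_over_cdp('ws://localhost:3000')
    context = browser.new_context(
        accept_downloads=False,             # should never be needed
        bypass_csp=True,                    # needed for JS execution on GitHub and others
        extra_http_headers=request_headers,
        ignore_https_errors=True,
        service_workers=os.getenv('PLAYWRIGHT_SERVICE_WORKERS', 'allow'),  # 'allow' or 'block'
        user_agent=request_headers.get('User-Agent'),
    )
    page = context.new_page()
    page.goto('https://example.com')
    print(page.title())
    context.close()
    browser.close()
```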
@@ -5,8 +5,9 @@ import websockets.exceptions
from urllib.parse import urlparse

from loguru import logger
from changedetectionio.content_fetchers.base import Fetcher
from changedetectionio.content_fetchers.exceptions import PageUnloadable, Non200ErrorCodeReceived, EmptyReply, ScreenshotUnavailable, BrowserConnectError

from changedetectionio.content_fetchers.base import Fetcher, manage_user_agent
from changedetectionio.content_fetchers.exceptions import PageUnloadable, Non200ErrorCodeReceived, EmptyReply, BrowserFetchTimedOut, BrowserConnectError


class fetcher(Fetcher):

@@ -100,10 +101,11 @@ class fetcher(Fetcher):
else:
self.page = await browser.newPage()

await self.page.setUserAgent(manage_user_agent(headers=request_headers, current_ua=await self.page.evaluate('navigator.userAgent')))

await self.page.setBypassCSP(True)
if request_headers:
await self.page.setExtraHTTPHeaders(request_headers)
# @todo check user-agent worked

# SOCKS5 with authentication is not supported (yet)
# https://github.com/microsoft/playwright/issues/10567

@@ -212,8 +214,12 @@ class fetcher(Fetcher):
logger.error('ERROR: Failed to get viewport-only reduced screenshot :(')
pass
finally:
# It's good to log here in the case that the browser crashes on shutting down but we still get the data we need
logger.success(f"Fetching '{url}' complete, closing page")
await self.page.close()
logger.success(f"Fetching '{url}' complete, closing browser")
await browser.close()
logger.success(f"Fetching '{url}' complete, exiting puppeteer fetch.")

async def main(self, **kwargs):
await self.fetch_page(**kwargs)

@@ -221,14 +227,21 @@ class fetcher(Fetcher):
def run(self, url, timeout, request_headers, request_body, request_method, ignore_status_codes=False,
current_include_filters=None, is_binary=False):

#@todo make update_worker async which could run any of these content_fetchers within memory and time constraints
max_time = os.getenv('PUPPETEER_MAX_PROCESSING_TIMEOUT_SECONDS', 180)

# This will work in 3.10 but not >= 3.11 because 3.11 wants tasks only
asyncio.run(self.main(
url=url,
timeout=timeout,
request_headers=request_headers,
request_body=request_body,
request_method=request_method,
ignore_status_codes=ignore_status_codes,
current_include_filters=current_include_filters,
is_binary=is_binary
))
try:
asyncio.run(asyncio.wait_for(self.main(
url=url,
timeout=timeout,
request_headers=request_headers,
request_body=request_body,
request_method=request_method,
ignore_status_codes=ignore_status_codes,
current_include_filters=current_include_filters,
is_binary=is_binary
), timeout=max_time))
except asyncio.TimeoutError:
raise(BrowserFetchTimedOut(msg=f"Browser connected but was unable to process the page in {max_time} seconds."))
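The run() change above wraps the whole async fetch in asyncio.wait_for, so a hung browser turns into a timeout error instead of blocking the worker indefinitely. The pattern in isolation, with a sleep standing in for the real page fetch and a generic exception in place of the project's BrowserFetchTimedOut:

```python
import asyncio
import os

async def fetch_page(url):
    # Stand-in for the real page fetch
    await asyncio.sleep(2)
    return f"fetched {url}"

def run(url):
    max_time = int(os.getenv('PUPPETEER_MAX_PROCESSING_TIMEOUT_SECONDS', 180))
    try:
        # wait_for cancels the coroutine and raises TimeoutError if it exceeds max_time
        return asyncio.run(asyncio.wait_for(fetch_page(url), timeout=max_time))
    except asyncio.TimeoutError:
        raise RuntimeError(f"Browser connected but was unable to process the page in {max_time} seconds.")

print(run("https://example.com"))
```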
@@ -10,12 +10,15 @@ function isItemInStock() {
const outOfStockTexts = [
' أخبرني عندما يتوفر',
'0 in stock',
'actuellement indisponible',
'agotado',
'article épuisé',
'artikel zurzeit vergriffen',
'as soon as stock is available',
'ausverkauft', // sold out
'available for back order',
'awaiting stock',
'back in stock soon',
'back-order or out of stock',
'backordered',
'benachrichtigt mich', // notify me

@@ -24,6 +27,7 @@ function isItemInStock() {
'coming soon',
'currently have any tickets for this',
'currently unavailable',
'dieser artikel ist bald wieder verfügbar',
'dostępne wkrótce',
'en rupture de stock',
'ist derzeit nicht auf lager',

@@ -42,9 +46,9 @@ function isItemInStock() {
'no tickets available',
'not available',
'not currently available',
'not in stock',
'not in stock',
'notify me when available',
'notify when available',
'notify when available',
'não estamos a aceitar encomendas',
'out of stock',
'out-of-stock',

@@ -54,18 +58,26 @@ function isItemInStock() {
'sold-out',
'temporarily out of stock',
'temporarily unavailable',
'there were no search results for',
'this item is currently unavailable',
'tickets unavailable',
'tijdelijk uitverkocht',
'unavailable tickets',
'vorbestellung ist bald möglich',
'we couldn\'t find any products that match',
'we do not currently have an estimate of when this product will be back in stock.',
'we don\'t know when or if this item will be back in stock.',
'we were not able to find a match',
'zur zeit nicht an lager',
'品切れ',
'已售',
'已售完',
'품절'
];


const vh = Math.max(document.documentElement.clientHeight || 0, window.innerHeight || 0);

function getElementBaseText(element) {
// .textContent can include text from children which may give the wrong results
// scan only immediate TEXT_NODEs, which will be a child of the element

@@ -76,29 +88,69 @@ function isItemInStock() {
return text.toLowerCase().trim();
}

const negateOutOfStockRegex = new RegExp('([0-9] in stock|add to cart)', 'ig');
const negateOutOfStockRegex = new RegExp('^([0-9] in stock|add to cart|in stock)', 'ig');

// The out-of-stock or in-stock-text is generally always above-the-fold
// and often below-the-fold is a list of related products that may or may not contain trigger text
// so it's good to filter to just the 'above the fold' elements
// and it should be atleast 100px from the top to ignore items in the toolbar, sometimes menu items like "Coming soon" exist
const elementsToScan = Array.from(document.getElementsByTagName('*')).filter(element => element.getBoundingClientRect().top + window.scrollY <= vh && element.getBoundingClientRect().top + window.scrollY >= 100);


// @todo - if it's SVG or IMG, go into image diff mode
// %ELEMENTS% replaced at injection time because different interfaces use it with different settings

console.log("Scanning %ELEMENTS%");

function collectVisibleElements(parent, visibleElements) {
if (!parent) return; // Base case: if parent is null or undefined, return

// Add the parent itself to the visible elements array if it's of the specified types
visibleElements.push(parent);

// Iterate over the parent's children
const children = parent.children;
for (let i = 0; i < children.length; i++) {
const child = children[i];
if (
child.nodeType === Node.ELEMENT_NODE &&
window.getComputedStyle(child).display !== 'none' &&
window.getComputedStyle(child).visibility !== 'hidden' &&
child.offsetWidth >= 0 &&
child.offsetHeight >= 0 &&
window.getComputedStyle(child).contentVisibility !== 'hidden'
) {
// If the child is an element and is visible, recursively collect visible elements
collectVisibleElements(child, visibleElements);
}
}
}

const elementsToScan = [];
collectVisibleElements(document.body, elementsToScan);

var elementText = "";

// REGEXS THAT REALLY MEAN IT'S IN STOCK
for (let i = elementsToScan.length - 1; i >= 0; i--) {
const element = elementsToScan[i];

// outside the 'fold' or some weird text in the heading area
// .getBoundingClientRect() was causing a crash in chrome 119, can only be run on contentVisibility != hidden
if (element.getBoundingClientRect().top + window.scrollY >= vh || element.getBoundingClientRect().top + window.scrollY <= 100) {
continue
}

elementText = "";
if (element.tagName.toLowerCase() === "input") {
elementText = element.value.toLowerCase();
elementText = element.value.toLowerCase().trim();
} else {
elementText = getElementBaseText(element);
}

if (elementText.length) {
// try which ones could mean its in stock
if (negateOutOfStockRegex.test(elementText)) {
if (negateOutOfStockRegex.test(elementText) && !elementText.includes('(0 products)')) {
console.log(`Negating/overriding 'Out of Stock' back to "Possibly in stock" found "${elementText}"`)
return 'Possibly in stock';
}
}

@@ -107,28 +159,34 @@ function isItemInStock() {
// OTHER STUFF THAT COULD BE THAT IT'S OUT OF STOCK
for (let i = elementsToScan.length - 1; i >= 0; i--) {
const element = elementsToScan[i];
if (element.offsetWidth > 0 || element.offsetHeight > 0 || element.getClientRects().length > 0) {
elementText = "";
if (element.tagName.toLowerCase() === "input") {
elementText = element.value.toLowerCase();
} else {
elementText = getElementBaseText(element);
}
// outside the 'fold' or some weird text in the heading area
// .getBoundingClientRect() was causing a crash in chrome 119, can only be run on contentVisibility != hidden
if (element.getBoundingClientRect().top + window.scrollY >= vh + 150 || element.getBoundingClientRect().top + window.scrollY <= 100) {
continue
}
elementText = "";
if (element.tagName.toLowerCase() === "input") {
elementText = element.value.toLowerCase().trim();
} else {
elementText = getElementBaseText(element);
}

if (elementText.length) {
// and these mean its out of stock
for (const outOfStockText of outOfStockTexts) {
if (elementText.includes(outOfStockText)) {
return outOfStockText; // item is out of stock
}
if (elementText.length) {
// and these mean its out of stock
for (const outOfStockText of outOfStockTexts) {
if (elementText.includes(outOfStockText)) {
console.log(`Selected 'Out of Stock' - found text "${outOfStockText}" - "${elementText}"`)
return outOfStockText; // item is out of stock
}
}
}
}

console.log(`Returning 'Possibly in stock' - cant' find any useful matching text`)
return 'Possibly in stock'; // possibly in stock, cant decide otherwise.
}

// returns the element text that makes it think it's out of stock
return isItemInStock().trim()
@@ -16,24 +16,23 @@ try {
}


// Include the getXpath script directly, easier than fetching
function getxpath(e) {
var n = e;
if (n && n.id) return '//*[@id="' + n.id + '"]';
for (var o = []; n && Node.ELEMENT_NODE === n.nodeType;) {
for (var i = 0, r = !1, d = n.previousSibling; d;) d.nodeType !== Node.DOCUMENT_TYPE_NODE && d.nodeName === n.nodeName && i++, d = d.previousSibling;
for (d = n.nextSibling; d;) {
if (d.nodeName === n.nodeName) {
r = !0;
break
}
d = d.nextSibling
var n = e;
if (n && n.id) return '//*[@id="' + n.id + '"]';
for (var o = []; n && Node.ELEMENT_NODE === n.nodeType;) {
for (var i = 0, r = !1, d = n.previousSibling; d;) d.nodeType !== Node.DOCUMENT_TYPE_NODE && d.nodeName === n.nodeName && i++, d = d.previousSibling;
for (d = n.nextSibling; d;) {
if (d.nodeName === n.nodeName) {
r = !0;
break
}
o.push((n.prefix ? n.prefix + ":" : "") + n.localName + (i || r ? "[" + (i + 1) + "]" : "")), n = n.parentNode
d = d.nextSibling
}
return o.length ? "/" + o.reverse().join("/") : ""
o.push((n.prefix ? n.prefix + ":" : "") + n.localName + (i || r ? "[" + (i + 1) + "]" : "")), n = n.parentNode
}
return o.length ? "/" + o.reverse().join("/") : ""
}

const findUpTag = (el) => {
let r = el

@@ -59,14 +58,14 @@ const findUpTag = (el) => {

// Strategy 2: Keep going up until we hit an ID tag, imagine it's like #list-widget div h4
while (r.parentNode) {
if (depth == 5) {
if (depth === 5) {
break;
}
if ('' !== r.id) {
chained_css.unshift("#" + CSS.escape(r.id));
final_selector = chained_css.join(' > ');
// Be sure theres only one, some sites have multiples of the same ID tag :-(
if (window.document.querySelectorAll(final_selector).length == 1) {
if (window.document.querySelectorAll(final_selector).length === 1) {
return final_selector;
}
return null;

@@ -82,30 +81,60 @@ const findUpTag = (el) => {

// @todo - if it's SVG or IMG, go into image diff mode
// %ELEMENTS% replaced at injection time because different interfaces use it with different settings
var elements = window.document.querySelectorAll("%ELEMENTS%");

var size_pos = [];
// after page fetch, inject this JS
// build a map of all elements and their positions (maybe that only include text?)
var bbox;
for (var i = 0; i < elements.length; i++) {
bbox = elements[i].getBoundingClientRect();
console.log("Scanning %ELEMENTS%");

// Exclude items that are not interactable or visible
if(elements[i].style.opacity === "0") {
continue
function collectVisibleElements(parent, visibleElements) {
if (!parent) return; // Base case: if parent is null or undefined, return


// Add the parent itself to the visible elements array if it's of the specified types
const tagName = parent.tagName.toLowerCase();
if ("%ELEMENTS%".split(',').includes(tagName)) {
visibleElements.push(parent);
}
if(elements[i].style.display === "none" || elements[i].style.pointerEvents === "none" ) {
continue

// Iterate over the parent's children
const children = parent.children;
for (let i = 0; i < children.length; i++) {
const child = children[i];
if (
child.nodeType === Node.ELEMENT_NODE &&
window.getComputedStyle(child).display !== 'none' &&
window.getComputedStyle(child).visibility !== 'hidden' &&
child.offsetWidth >= 0 &&
child.offsetHeight >= 0 &&
window.getComputedStyle(child).contentVisibility !== 'hidden'
) {
// If the child is an element and is visible, recursively collect visible elements
collectVisibleElements(child, visibleElements);
}
}
}

// Create an array to hold the visible elements
const visibleElementsArray = [];

// Call collectVisibleElements with the starting parent element
collectVisibleElements(document.body, visibleElementsArray);


visibleElementsArray.forEach(function (element) {

bbox = element.getBoundingClientRect();

// Skip really small ones, and where width or height ==0
if (bbox['width'] * bbox['height'] < 100) {
continue;
if (bbox['width'] * bbox['height'] < 10) {
return
}

// Don't include elements that are offset from canvas
if (bbox['top']+scroll_y < 0 || bbox['left'] < 0) {
continue;
if (bbox['top'] + scroll_y < 0 || bbox['left'] < 0) {
return
}

// @todo the getXpath kind of sucks, it doesnt know when there is for example just one ID sometimes

@@ -114,46 +143,41 @@ for (var i = 0; i < elements.length; i++) {

// 1st primitive - if it has class, try joining it all and select, if theres only one.. well thats us.
xpath_result = false;

try {
var d = findUpTag(elements[i]);
var d = findUpTag(element);
if (d) {
xpath_result = d;
}
} catch (e) {
console.log(e);
}

// You could swap it and default to getXpath and then try the smarter one
// default back to the less intelligent one
if (!xpath_result) {
try {
// I've seen on FB and eBay that this doesnt work
// ReferenceError: getXPath is not defined at eval (eval at evaluate (:152:29), <anonymous>:67:20) at UtilityScript.evaluate (<anonymous>:159:18) at UtilityScript.<anonymous> (<anonymous>:1:44)
xpath_result = getxpath(elements[i]);
xpath_result = getxpath(element);
} catch (e) {
console.log(e);
continue;
return
}
}

if (window.getComputedStyle(elements[i]).visibility === "hidden") {
continue;
}

// @todo Possible to ONLY list where it's clickable to save JSON xfer size
size_pos.push({
xpath: xpath_result,
width: Math.round(bbox['width']),
height: Math.round(bbox['height']),
left: Math.floor(bbox['left']),
top: Math.floor(bbox['top'])+scroll_y,
tagName: (elements[i].tagName) ? elements[i].tagName.toLowerCase() : '',
tagtype: (elements[i].tagName == 'INPUT' && elements[i].type) ? elements[i].type.toLowerCase() : '',
isClickable: (elements[i].onclick) || window.getComputedStyle(elements[i]).cursor == "pointer"
top: Math.floor(bbox['top']) + scroll_y,
tagName: (element.tagName) ? element.tagName.toLowerCase() : '',
tagtype: (element.tagName.toLowerCase() === 'input' && element.type) ? element.type.toLowerCase() : '',
isClickable: window.getComputedStyle(element).cursor == "pointer"
});

}
});


// Inject the current one set in the include_filters, which may be a CSS rule
// used for displaying the current one in VisualSelector, where its not one we generated.

@@ -180,7 +204,7 @@ if (include_filters.length) {
}
} catch (e) {
// Maybe catch DOMException and alert?
console.log("xpath_element_scraper: Exception selecting element from filter "+f);
console.log("xpath_element_scraper: Exception selecting element from filter " + f);
console.log(e);
}

@@ -210,8 +234,8 @@ if (include_filters.length) {
}
}
}

if(!q) {

if (!q) {
console.log("xpath_element_scraper: filter element " + f + " was not found");
}

@@ -221,7 +245,7 @@ if (include_filters.length) {
width: parseInt(bbox['width']),
height: parseInt(bbox['height']),
left: parseInt(bbox['left']),
top: parseInt(bbox['top'])+scroll_y
top: parseInt(bbox['top']) + scroll_y
});
}
}

@@ -229,7 +253,7 @@ if (include_filters.length) {

// Sort the elements so we find the smallest one first, in other words, we find the smallest one matching in that area
// so that we dont select the wrapping element by mistake and be unable to select what we want
size_pos.sort((a, b) => (a.width*a.height > b.width*b.height) ? 1 : -1)
size_pos.sort((a, b) => (a.width * a.height > b.width * b.height) ? 1 : -1)

// Window.width required for proper scaling in the frontend
return {'size_pos': size_pos, 'browser_width': window.innerWidth};
@@ -6,7 +6,7 @@ import queue
import threading
import time
from copy import deepcopy
from distutils.util import strtobool
from changedetectionio.strtobool import strtobool
from functools import wraps
from threading import Event

@@ -30,6 +30,7 @@ from flask_compress import Compress as FlaskCompress
from flask_login import current_user
from flask_paginate import Pagination, get_page_parameter
from flask_restful import abort, Api
from flask_cors import CORS
from flask_wtf import CSRFProtect
from loguru import logger

@@ -53,6 +54,9 @@ app = Flask(__name__,
static_folder="static",
template_folder="templates")

# Enable CORS, especially useful for the Chrome extension to operate from anywhere
CORS(app)

# Super handy for compressing large BrowserSteps responses and others
FlaskCompress(app)
@@ -404,17 +408,21 @@ def changedetection_app(config=None, datastore_o=None):
global datastore
from changedetectionio import forms

limit_tag = request.args.get('tag', '').lower().strip()
active_tag_req = request.args.get('tag', '').lower().strip()
active_tag_uuid = active_tag = None

# Be sure limit_tag is a uuid
for uuid, tag in datastore.data['settings']['application'].get('tags', {}).items():
if limit_tag == tag.get('title', '').lower().strip():
limit_tag = uuid
if active_tag_req:
for uuid, tag in datastore.data['settings']['application'].get('tags', {}).items():
if active_tag_req == tag.get('title', '').lower().strip() or active_tag_req == uuid:
active_tag = tag
active_tag_uuid = uuid
break


# Redirect for the old rss path which used the /?rss=true
if request.args.get('rss'):
return redirect(url_for('rss', tag=limit_tag))
return redirect(url_for('rss', tag=active_tag_uuid))

op = request.args.get('op')
if op:

@@ -425,7 +433,7 @@ def changedetection_app(config=None, datastore_o=None):
datastore.data['watching'][uuid].toggle_mute()

datastore.needs_write = True
return redirect(url_for('index', tag = limit_tag))
return redirect(url_for('index', tag = active_tag_uuid))

# Sort by last_changed and add the uuid which is usually the key..
sorted_watches = []

@@ -436,7 +444,7 @@ def changedetection_app(config=None, datastore_o=None):
if with_errors and not watch.get('last_error'):
continue

if limit_tag and not limit_tag in watch['tags']:
if active_tag_uuid and not active_tag_uuid in watch['tags']:
continue
if watch.get('last_error'):
errored_count += 1

@@ -455,11 +463,12 @@ def changedetection_app(config=None, datastore_o=None):
total=total_count,
per_page=datastore.data['settings']['application'].get('pager_size', 50), css_framework="semantic")


sorted_tags = sorted(datastore.data['settings']['application'].get('tags').items(), key=lambda x: x[1]['title'])
output = render_template(
"watch-overview.html",
# Don't link to hosting when we're on the hosting environment
active_tag=limit_tag,
active_tag=active_tag,
active_tag_uuid=active_tag_uuid,
app_rss_token=datastore.data['settings']['application']['rss_access_token'],
datastore=datastore,
errored_count=errored_count,

@@ -474,7 +483,7 @@ def changedetection_app(config=None, datastore_o=None):
sort_attribute=request.args.get('sort') if request.args.get('sort') else request.cookies.get('sort'),
sort_order=request.args.get('order') if request.args.get('order') else request.cookies.get('order'),
system_default_fetcher=datastore.data['settings']['application'].get('fetch_backend'),
tags=datastore.data['settings']['application'].get('tags'),
tags=sorted_tags,
watches=sorted_watches
)
@@ -507,21 +516,38 @@ def changedetection_app(config=None, datastore_o=None):

watch = datastore.data['watching'].get(watch_uuid) if watch_uuid else None

# validate URLS
if not len(request.form['notification_urls'].strip()):
return make_response({'error': 'No Notification URLs set'}, 400)
notification_urls = request.form['notification_urls'].strip().splitlines()

for server_url in request.form['notification_urls'].splitlines():
if len(server_url.strip()):
if not apobj.add(server_url):
message = '{} is not a valid AppRise URL.'.format(server_url)
return make_response({'error': message}, 400)
if not notification_urls:
logger.debug("Test notification - Trying by group/tag in the edit form if available")
# On an edit page, we should also fire off to the tags if they have notifications
if request.form.get('tags') and request.form['tags'].strip():
for k in request.form['tags'].split(','):
tag = datastore.tag_exists_by_name(k.strip())
notification_urls = tag.get('notifications_urls') if tag and tag.get('notifications_urls') else None

is_global_settings_form = request.args.get('mode', '') == 'global-settings'
is_group_settings_form = request.args.get('mode', '') == 'group-settings'
if not notification_urls and not is_global_settings_form and not is_group_settings_form:
# In the global settings, use only what is typed currently in the text box
logger.debug("Test notification - Trying by global system settings notifications")
if datastore.data['settings']['application'].get('notification_urls'):
notification_urls = datastore.data['settings']['application']['notification_urls']


if not notification_urls:
return 'No Notification URLs set/found'

for n_url in notification_urls:
if len(n_url.strip()):
if not apobj.add(n_url):
return f'Error - {n_url} is not a valid AppRise URL.'

try:
# use the same as when it is triggered, but then override it with the form test values
n_object = {
'watch_url': request.form['window_url'],
'notification_urls': request.form['notification_urls'].splitlines()
'notification_urls': notification_urls
}

# Only use if present, if not set in n_object it should use the default system value

@@ -540,7 +566,7 @@ def changedetection_app(config=None, datastore_o=None):
except Exception as e:
return make_response({'error': str(e)}, 400)

return 'OK'
return 'OK - Sent test notifications'


@app.route("/clear_history/<string:uuid>", methods=['GET'])
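The test-notification endpoint above now gathers URLs from the form, then from the watch's tags, then from the global settings, before handing them to Apprise. A bare-bones sketch of the Apprise part on its own; the URL is a placeholder and the surrounding validation is simplified:

```python
import apprise

notification_urls = ['json://localhost:8080/test-endpoint']  # placeholder Apprise URL

apobj = apprise.Apprise()
for n_url in notification_urls:
    # Apprise.add() returns False when the URL scheme or format is not recognised
    if n_url.strip() and not apobj.add(n_url):
        raise ValueError(f'Error - {n_url} is not a valid AppRise URL.')

# Send a simple test payload to every registered URL
apobj.notify(title='Test notification', body='changedetection.io test')
```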
@@ -577,6 +603,12 @@ def changedetection_app(config=None, datastore_o=None):
output = render_template("clear_all_history.html")
return output

def _watch_has_tag_options_set(watch):
"""This should be fixed better so that Tag is some proper Model, a tag is just a Watch also"""
for tag_uuid, tag in datastore.data['settings']['application'].get('tags', {}).items():
if tag_uuid in watch.get('tags', []) and (tag.get('include_filters') or tag.get('subtractive_selectors')):
return True

@app.route("/edit/<string:uuid>", methods=['GET', 'POST'])
@login_optionally_required
# https://stackoverflow.com/questions/42984453/wtforms-populate-form-with-data-if-data-exists

@@ -747,6 +779,7 @@ def changedetection_app(config=None, datastore_o=None):
has_default_notification_urls=True if len(datastore.data['settings']['application']['notification_urls']) else False,
has_empty_checktime=using_default_check_time,
has_extra_headers_file=len(datastore.get_all_headers_in_textfile_for_watch(uuid=uuid)) > 0,
has_special_tag_options=_watch_has_tag_options_set(watch=watch),
is_html_webdriver=is_html_webdriver,
jq_support=jq_support,
playwright_enabled=os.getenv('PLAYWRIGHT_DRIVER_URL', False),
@@ -1270,9 +1303,8 @@ def changedetection_app(config=None, datastore_o=None):

url = request.form.get('url').strip()
if datastore.url_exists(url):
flash('The URL {} already exists'.format(url), "error")
return redirect(url_for('index'))

flash(f'Warning, URL {url} already exists', "notice")

add_paused = request.form.get('edit_and_watch_submit_button') != None
processor = request.form.get('processor', 'text_json_diff')
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras={'paused': add_paused, 'processor': processor})
@@ -1422,6 +1454,13 @@ def changedetection_app(config=None, datastore_o=None):
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
flash("{} watches queued for rechecking".format(len(uuids)))

elif (op == 'clear-errors'):
for uuid in uuids:
uuid = uuid.strip()
if datastore.data['watching'].get(uuid):
datastore.data['watching'][uuid]["last_error"] = False
flash(f"{len(uuids)} watches errors cleared")

elif (op == 'clear-history'):
for uuid in uuids:
uuid = uuid.strip()
@@ -1,6 +1,6 @@
import os
import re
from distutils.util import strtobool
from changedetectionio.strtobool import strtobool

from wtforms import (
BooleanField,
@@ -169,14 +169,14 @@ def xpath1_filter(xpath_filter, html_content, append_pretty_line_formatting=Fals
# And where the matched result doesn't include something that will cause Inscriptis to add a newline
# (This way each 'match' reliably has a new-line in the diff)
# Divs are converted to 4 whitespaces by inscriptis
if append_pretty_line_formatting and len(html_block) and (not hasattr( element, 'tag' ) or not element.tag in (['br', 'hr', 'div', 'p'])):
if append_pretty_line_formatting and len(html_block) and (not hasattr(element, 'tag') or not element.tag in (['br', 'hr', 'div', 'p'])):
html_block += TEXT_FILTER_LIST_LINE_SUFFIX

if type(element) == etree._ElementStringResult:
html_block += str(element)
elif type(element) == etree._ElementUnicodeResult:
html_block += str(element)
# Some kind of text, UTF-8 or other
if isinstance(element, (str, bytes)):
html_block += element
else:
# Return the HTML which will get parsed as text
html_block += etree.tostring(element, pretty_print=True).decode('utf-8')

return html_block
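The xpath1_filter change replaces checks against lxml's private `_ElementStringResult` / `_ElementUnicodeResult` types (dropped in newer lxml releases) with a plain `isinstance(element, (str, bytes))`, since XPath text results come back as string subclasses. A quick standalone illustration of why that works:

```python
from lxml import html, etree

tree = html.fromstring('<div><p>Hello</p><p>World</p></div>')

# The union returns a mix of element nodes and text results in document order
for element in tree.xpath('//p | //p/text()'):
    if isinstance(element, (str, bytes)):
        # text() results arrive as lxml "smart strings", which subclass str
        print('text:', element)
    else:
        # Everything else is an element node; serialise it back to markup
        print('node:', etree.tostring(element, pretty_print=True).decode('utf-8').strip())
```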
@@ -1,4 +1,4 @@
from distutils.util import strtobool
from changedetectionio.strtobool import strtobool
import os
import re
import time

@@ -362,6 +362,7 @@ class model(dict):
# @todo bump static cache of the last timestamp so we dont need to examine the file to set a proper ''viewed'' status
return snapshot_fname

@property
@property
def has_empty_checktime(self):
# using all() + dictionary comprehension
@@ -3,7 +3,7 @@ import os
import hashlib
import re
from copy import deepcopy
from distutils.util import strtobool
from changedetectionio.strtobool import strtobool
from loguru import logger

class difference_detection_processor():

@@ -75,8 +75,12 @@ class difference_detection_processor():

proxy_url = None
if preferred_proxy_id:
proxy_url = self.datastore.proxy_list.get(preferred_proxy_id).get('url')
logger.debug(f"Selected proxy key '{preferred_proxy_id}' as proxy URL '{proxy_url}' for {url}")
# Custom browser endpoints should NOT have a proxy added
if not prefer_fetch_backend.startswith('extra_browser_'):
proxy_url = self.datastore.proxy_list.get(preferred_proxy_id).get('url')
logger.debug(f"Selected proxy key '{preferred_proxy_id}' as proxy URL '{proxy_url}' for {url}")
else:
logger.debug(f"Skipping adding proxy data when custom Browser endpoint is specified. ")

# Now call the fetcher (playwright/requests/etc) with arguments that only a fetcher would need.
# When browser_connection_url is None, it method should default to working out whats the best defaults (os env vars etc)
@@ -119,7 +119,7 @@ class perform_site_check(difference_detection_processor):
include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='include_filters')

# 1845 - remove duplicated filters in both group and watch include filter
include_filters_rule = list({*watch.get('include_filters', []), *include_filters_from_tags})
include_filters_rule = list(dict.fromkeys(watch.get('include_filters', []) + include_filters_from_tags))

subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='subtractive_selectors'),
*watch.get("subtractive_selectors", []),
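Switching from a set union to dict.fromkeys keeps the combined filter list de-duplicated while preserving the order the filters were defined in (watch filters first, then the group's). A small example with invented filter strings:

```python
watch_filters = ['//div[@id="price"]', '.product-title']
tag_filters = ['.product-title', '//div[@id="stock"]']

# Set-based dedup loses the original ordering
unordered = list({*watch_filters, *tag_filters})

# dict.fromkeys dedups while keeping first-seen order (dicts preserve insertion order from Python 3.7)
include_filters_rule = list(dict.fromkeys(watch_filters + tag_filters))
print(include_filters_rule)
# ['//div[@id="price"]', '.product-title', '//div[@id="stock"]']
```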
@@ -160,6 +160,12 @@ $(document).ready(function () {
e.offsetX > item.left * y_scale && e.offsetX < item.left * y_scale + item.width * y_scale

) {
// Ignore really large ones, because we are scraping 'div' also from xpath_element_scraper but
// that div or whatever could be some wrapper and would generally make you select the whole page
if (item.width > 800 && item.height > 400) {
return
}

// There could be many elements here, record them all and then we'll find out which is the most 'useful'
// (input, textarea, button, A etc)
if (item.width < xpath_data['browser_width']) {
@@ -28,15 +28,11 @@ $(document).ready(function() {
notification_format: $('#notification_format').val(),
notification_title: $('#notification_title').val(),
notification_urls: $('.notification-urls').val(),
tags: $('#tags').val(),
window_url: window.location.href,
}


if (!data['notification_urls'].length) {
alert("Notification URL list is empty, cannot send test.")
return;
}

$.ajax({
type: "POST",
url: notification_base_url,

@@ -49,7 +45,7 @@ $(document).ready(function() {
}
}).done(function(data){
console.log(data);
alert('Sent');
alert(data);
}).fail(function(data){
console.log(data);
alert('There was an error communicating with the server.');
@@ -68,7 +68,7 @@
--color-last-checked: #bbb;
--color-text-footer: #444;
--color-border-watch-table-cell: #eee;
--color-text-watch-tag-list: #e70069;
--color-text-watch-tag-list: rgba(231, 0, 105, 0.4);
--color-background-new-watch-form: rgba(0, 0, 0, 0.05);
--color-background-new-watch-input: var(--color-white);
--color-text-new-watch-input: var(--color-text);

@@ -111,7 +111,7 @@ html[data-darkmode="true"] {
--color-background-input: var(--color-grey-350);
--color-text-input-description: var(--color-grey-600);
--color-text-input-placeholder: var(--color-grey-600);
--color-text-watch-tag-list: #fa3e92;
--color-text-watch-tag-list: rgba(250, 62, 146, 0.4);
--color-background-code: var(--color-grey-200);
--color-background-tab: rgba(0, 0, 0, 0.2);
--color-background-tab-hover: rgba(0, 0, 0, 0.5);
@@ -75,7 +75,7 @@
--color-text-footer: #444;
--color-border-watch-table-cell: #eee;

--color-text-watch-tag-list: #e70069;
--color-text-watch-tag-list: rgba(231, 0, 105, 0.4);
--color-background-new-watch-form: rgba(0, 0, 0, 0.05);
--color-background-new-watch-input: var(--color-white);
--color-text-new-watch-input: var(--color-text);

@@ -127,7 +127,7 @@ html[data-darkmode="true"] {
--color-background-input: var(--color-grey-350);
--color-text-input-description: var(--color-grey-600);
--color-text-input-placeholder: var(--color-grey-600);
--color-text-watch-tag-list: #fa3e92;
--color-text-watch-tag-list: rgba(250, 62, 146, 0.4);
--color-background-code: var(--color-grey-200);

--color-background-tab: rgba(0, 0, 0, 0.2);

@@ -187,8 +187,11 @@ code {
}

.watch-tag-list {
color: var(--color-text-watch-tag-list);
color: var(--color-white);
white-space: nowrap;
background: var(--color-text-watch-tag-list);
border-radius: 5px;
padding: 2px 5px;
}

.box {

@@ -1096,3 +1099,16 @@ ul {
white-space: nowrap;
}

#chrome-extension-link {
img {
height: 21px;
padding: 2px;
vertical-align: middle;
}

padding: 9px;
border: 1px solid var(--color-grey-800);
border-radius: 10px;
vertical-align: middle;
}
@@ -284,7 +284,7 @@ ul#requests-extra_browsers {
--color-last-checked: #bbb;
--color-text-footer: #444;
--color-border-watch-table-cell: #eee;
--color-text-watch-tag-list: #e70069;
--color-text-watch-tag-list: rgba(231, 0, 105, 0.4);
--color-background-new-watch-form: rgba(0, 0, 0, 0.05);
--color-background-new-watch-input: var(--color-white);
--color-text-new-watch-input: var(--color-text);

@@ -327,7 +327,7 @@ html[data-darkmode="true"] {
--color-background-input: var(--color-grey-350);
--color-text-input-description: var(--color-grey-600);
--color-text-input-placeholder: var(--color-grey-600);
--color-text-watch-tag-list: #fa3e92;
--color-text-watch-tag-list: rgba(250, 62, 146, 0.4);
--color-background-code: var(--color-grey-200);
--color-background-tab: rgba(0, 0, 0, 0.2);
--color-background-tab-hover: rgba(0, 0, 0, 0.5);

@@ -532,8 +532,11 @@ code {
margin: 0 3px 0 5px; }

.watch-tag-list {
color: var(--color-text-watch-tag-list);
white-space: nowrap; }
color: var(--color-white);
white-space: nowrap;
background: var(--color-text-watch-tag-list);
border-radius: 5px;
padding: 2px 5px; }

.box {
max-width: 80%;

@@ -1180,3 +1183,13 @@ ul {
.restock-label.not-in-stock {
background-color: var(--color-background-button-cancel);
color: #777; }

#chrome-extension-link {
padding: 9px;
border: 1px solid var(--color-grey-800);
border-radius: 10px;
vertical-align: middle; }
#chrome-extension-link img {
height: 21px;
padding: 2px;
vertical-align: middle; }
@@ -1,4 +1,4 @@
from distutils.util import strtobool
from changedetectionio.strtobool import strtobool

from flask import (
    flash
@@ -657,7 +657,10 @@ class ChangeDetectionStore:
        return res

    def tag_exists_by_name(self, tag_name):
        return any(v.get('title', '').lower() == tag_name.lower() for k, v in self.__data['settings']['application']['tags'].items())
        # Check if any tag dictionary has a 'title' attribute matching the provided tag_name
        tags = self.__data['settings']['application']['tags'].values()
        return next((v for v in tags if v.get('title', '').lower() == tag_name.lower()),
                    None)

    def get_updates_available(self):
        import inspect
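The changed method now returns the matching tag's dict (or None) rather than a bare True/False, so existing truthiness checks keep behaving the same while callers can also read the tag's fields. A minimal usage sketch, assuming a populated datastore instance (the instance name and tag title here are illustrative, not from the commit):

# hedged sketch only
tag = datastore.tag_exists_by_name('pricing')
if tag:                      # truthy exactly when a match exists, as before
    print(tag.get('title'))  # the full tag dict is now available to the caller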
changedetectionio/strtobool.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Because strtobool was removed from distutils in Python 3.12

_MAP = {
    'y': True,
    'yes': True,
    't': True,
    'true': True,
    'on': True,
    '1': True,
    'n': False,
    'no': False,
    'f': False,
    'false': False,
    'off': False,
    '0': False
}


def strtobool(value):
    try:
        return _MAP[str(value).lower()]
    except KeyError:
        raise ValueError('"{}" is not a valid bool value'.format(value))
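A minimal usage sketch of the new helper, mirroring how the removed distutils function was typically called for environment flags (the environment variable name below is purely illustrative):

import os
from changedetectionio.strtobool import strtobool

# hypothetical flag name, shown only to illustrate the call pattern
if strtobool(os.getenv('USE_EXPERIMENTAL_FETCHER', 'false')):
    print("experimental fetcher enabled")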
@@ -147,7 +147,19 @@
        <section class="content">
            <div id="overlay">
                <div class="content">
                    <strong>changedetection.io needs your support!</strong><br>
                    <h4>Try our Chrome extension</h4>
                    <p>
                        <a id="chrome-extension-link"
                           title="Try our new Chrome Extension!"
                           href="https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop">
                            <img src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}">
                            Chrome Webstore
                        </a>
                    </p>

                    Easily add the current web page from your browser directly into your changedetection.io tool; more great features coming soon!

                    <h4>Changedetection.io needs your support!</h4>
                    <p>
                        You can help us by supporting changedetection.io on these platforms;
                    </p>
@@ -7,7 +7,8 @@
<script>
    const browser_steps_available_screenshots=JSON.parse('{{ watch.get_browsersteps_available_screenshots|tojson }}');
    const browser_steps_config=JSON.parse('{{ browser_steps_config|tojson }}');
    const browser_steps_fetch_screenshot_image_url="{{url_for('browser_steps.browser_steps_fetch_screenshot_image', uuid=uuid)}}";
    <!-- Should be _external so that firefox and others load it more reliably -->
    const browser_steps_fetch_screenshot_image_url="{{url_for('browser_steps.browser_steps_fetch_screenshot_image', uuid=uuid, _external=True)}}";
    const browser_steps_last_error_step={{ watch.browser_steps_last_error_step|tojson }};
    const browser_steps_start_url="{{url_for('browser_steps.browsersteps_start_session', uuid=uuid)}}";
    const browser_steps_sync_url="{{url_for('browser_steps.browsersteps_ui_update', uuid=uuid)}}";
@@ -31,6 +32,7 @@
<script src="{{url_for('static_content', group='js', filename='browser-steps.js')}}" defer></script>
{% endif %}

{% set has_tag_filters_extra="WARNING: Watch has tag/groups set with special filters\n" if has_special_tag_options else '' %}
<script src="{{url_for('static_content', group='js', filename='recheck-proxy.js')}}" defer></script>

<div class="edit-form monospaced-textarea">
@@ -280,7 +282,7 @@ User-Agent: wonderbra 1.0") }}
<div class="pure-control-group">
    {% set field = render_field(form.include_filters,
        rows=5,
        placeholder="#example
        placeholder=has_tag_filters_extra+"#example
xpath://body/div/span[contains(@class, 'example-class')]",
        class="m-d")
    %}
@@ -316,13 +318,14 @@ xpath://body/div/span[contains(@class, 'example-class')]",
    </span>
</div>
<fieldset class="pure-control-group">
    {{ render_field(form.subtractive_selectors, rows=5, placeholder="header
    {{ render_field(form.subtractive_selectors, rows=5, placeholder=has_tag_filters_extra+"header
footer
nav
.stockticker") }}
    <span class="pure-form-message-inline">
        <ul>
            <li> Remove HTML element(s) by CSS selector before text conversion. </li>
            <li> Don't paste HTML here, use only CSS selectors </li>
            <li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li>
        </ul>
    </span>
@@ -436,7 +439,7 @@ Unavailable") }}
<div class="pure-control-group">
    {% if visualselector_enabled %}
        <span class="pure-form-message-inline">
            The Visual Selector tool lets you select the <i>text</i> elements that will be used for the change detection ‐ after the <i>Browser Steps</i> has completed.<br><br>
            The Visual Selector tool lets you select the <i>text</i> elements that will be used for the change detection ‐ after the <i>Browser Steps</i> has completed; this tool is a helper to manage filters in the "CSS/JSONPath/JQ/XPath Filters" box of the <a href="#filters-and-triggers">Filters &amp; Triggers</a> tab.
        </span>

        <div id="selector-header">
@@ -107,7 +107,7 @@
<option value="" style="color: #aaa"> -- none --</option>
<option value="url">URL</option>
<option value="title">Title</option>
<option value="include_filter">CSS/xPath filter</option>
<option value="include_filters">CSS/xPath filter</option>
<option value="tag">Group / Tag name(s)</option>
<option value="interval_minutes">Recheck time (minutes)</option>
</select></td>
@@ -4,7 +4,7 @@
{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
{% from '_common_fields.jinja' import render_common_settings_form %}
<script>
    const notification_base_url="{{url_for('ajax_callback_send_notification_test', watch_uuid=uuid)}}";
    const notification_base_url="{{url_for('ajax_callback_send_notification_test', mode="global-settings")}}";
    {% if emailprefix %}
    const email_notification_prefix=JSON.parse('{{emailprefix|tojson}}');
    {% endif %}
@@ -168,12 +168,12 @@ nav
    </div>

    <div class="tab-pane-inner" id="api">

        <h4>API Access</h4>
        <p>Drive your changedetection.io via the API. More about <a href="https://github.com/dgtlmoon/changedetection.io/wiki/API-Reference">API access here</a></p>

        <div class="pure-control-group">
            {{ render_checkbox_field(form.application.form.api_access_token_enabled) }}
            <div class="pure-form-message-inline">Restrict API access by using the <code>x-api-key</code> header</div><br>
            <div class="pure-form-message-inline">Restrict API access by using the <code>x-api-key</code> header - required for the Chrome Extension to work</div><br>
            <div class="pure-form-message-inline"><br>API Key <span id="api-key">{{api_key}}</span>
                <span style="display:none;" id="api-key-copy" >copy</span>
            </div>
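For reference, a minimal sketch of calling the API with the key shown on this page, assuming a default local install and the watch-listing endpoint described in the API reference wiki (both the base URL and the key below are placeholders):

import requests

res = requests.get("http://localhost:5000/api/v1/watch",
                   headers={"x-api-key": "YOUR_API_KEY_HERE"})
print(res.json())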
@@ -181,6 +181,20 @@ nav
        <div class="pure-control-group">
            <a href="{{url_for('settings_reset_api_key')}}" class="pure-button button-small button-cancel">Regenerate API key</a>
        </div>
        <div class="pure-control-group">
            <h4>Chrome Extension</h4>
            <p>Easily add any web-page to your changedetection.io installation from within Chrome.</p>
            <strong>Step 1</strong> Install the extension, <strong>Step 2</strong> Navigate to this page,
            <strong>Step 3</strong> Open the extension from the toolbar and click "<i>Sync API Access</i>"
            <p>
                <a id="chrome-extension-link"
                   title="Try our new Chrome Extension!"
                   href="https://chromewebstore.google.com/detail/changedetectionio-website/kefcfmgmlhmankjmnbijimhofdjekbop">
                    <img src="{{ url_for('static_content', group='images', filename='Google-Chrome-icon.png') }}">
                    Chrome Webstore
                </a>
            </p>
        </div>
    </div>
    <div class="tab-pane-inner" id="proxies">
        <div id="recommended-proxy">
@@ -1,6 +1,6 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.jinja' import render_simple_field, render_field, render_nolabel_field %}
{% from '_helpers.jinja' import render_simple_field, render_field, render_nolabel_field, sort_by_title %}
<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
<script src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script>

@@ -13,7 +13,7 @@
<div id="watch-add-wrapper-zone">

    {{ render_nolabel_field(form.url, placeholder="https://...", required=true) }}
    {{ render_nolabel_field(form.tags, value=tags[active_tag].title if active_tag else '', placeholder="watch label / tag") }}
    {{ render_nolabel_field(form.tags, value=active_tag.title if active_tag else '', placeholder="watch label / tag") }}
    {{ render_nolabel_field(form.watch_submit_button, title="Watch this URL!" ) }}
    {{ render_nolabel_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }}
</div>
@@ -37,6 +37,7 @@
    <button class="pure-button button-secondary button-xsmall" name="op" value="assign-tag" id="checkbox-assign-tag">Tag</button>
    <button class="pure-button button-secondary button-xsmall" name="op" value="mark-viewed">Mark viewed</button>
    <button class="pure-button button-secondary button-xsmall" name="op" value="notification-default">Use default notification</button>
    <button class="pure-button button-secondary button-xsmall" name="op" value="clear-errors">Clear errors</button>
    <button class="pure-button button-secondary button-xsmall" style="background: #dd4242;" name="op" value="clear-history">Clear/reset history</button>
    <button class="pure-button button-secondary button-xsmall" style="background: #dd4242;" name="op" value="delete">Delete</button>
</div>
@@ -46,11 +47,13 @@
{% if search_q %}<div id="search-result-info">Searching "<strong><i>{{search_q}}</i></strong>"</div>{% endif %}
<div>
    <a href="{{url_for('index')}}" class="pure-button button-tag {{'active' if not active_tag }}">All</a>
    {% for uuid, tag in tags.items() %}
    {% if tag != "" %}
    <a href="{{url_for('index', tag=uuid) }}" class="pure-button button-tag {{'active' if active_tag == uuid }}">{{ tag.title }}</a>
    {% endif %}
    {% endfor %}

    <!-- tag list -->
    {% for uuid, tag in tags %}
    {% if tag != "" %}
    <a href="{{url_for('index', tag=uuid) }}" class="pure-button button-tag {{'active' if active_tag_uuid == uuid }}">{{ tag.title }}</a>
    {% endif %}
    {% endfor %}
</div>

{% set sort_order = sort_order or 'asc' %}
@@ -197,8 +200,8 @@
    </li>
    {% endif %}
    <li>
        <a href="{{ url_for('form_watch_checknow', tag=active_tag, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Recheck
        all {% if active_tag%} in "{{tags[active_tag].title}}"{%endif%}</a>
        <a href="{{ url_for('form_watch_checknow', tag=active_tag_uuid, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag ">Recheck
        all {% if active_tag_uuid %} in "{{active_tag.title}}"{%endif%}</a>
    </li>
    <li>
        <a href="{{ url_for('rss', tag=active_tag , token=app_rss_token)}}"><img alt="RSS Feed" id="feed-icon" src="{{url_for('static_content', group='images', filename='Generic_Feed-icon.svg')}}" height="15"></a>
@@ -0,0 +1,56 @@
import os
from flask import url_for
from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_client


def test_execute_custom_js(client, live_server):

    live_server_setup(live_server)
    assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"

    test_url = url_for('test_interactive_html_endpoint', _external=True)
    test_url = test_url.replace('localhost.localdomain', 'cdio')
    test_url = test_url.replace('localhost', 'cdio')

    res = client.post(
        url_for("form_quick_watch_add"),
        data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
        follow_redirects=True
    )

    assert b"Watch added in Paused state, saving will unpause" in res.data

    res = client.post(
        url_for("edit_page", uuid="first", unpause_on_save=1),
        data={
            "url": test_url,
            "tags": "",
            'fetch_backend': "html_webdriver",
            'webdriver_js_execute_code': 'document.querySelector("button[name=test-button]").click();',
            'headers': "testheader: yes\buser-agent: MyCustomAgent",
        },
        follow_redirects=True
    )
    assert b"unpaused" in res.data
    wait_for_all_checks(client)

    uuid = extract_UUID_from_client(client)
    assert live_server.app.config['DATASTORE'].data['watching'][uuid].history_n >= 1, "Watch history had at least 1 (everything fetched OK)"

    assert b"This text should be removed" not in res.data

    # Check HTML conversion detected and worked
    res = client.get(
        url_for("preview_page", uuid=uuid),
        follow_redirects=True
    )
    assert b"This text should be removed" not in res.data
    assert b"I smell JavaScript because the button was pressed" in res.data

    assert b"testheader: yes" in res.data
    assert b"user-agent: mycustomagent" in res.data

    client.get(
        url_for("form_delete", uuid="all"),
        follow_redirects=True
    )
@@ -95,7 +95,7 @@ def test_restock_detection(client, live_server):

    # We should have a notification
    time.sleep(2)
    assert os.path.isfile("test-datastore/notification.txt")
    assert os.path.isfile("test-datastore/notification.txt"), "Notification received"
    os.unlink("test-datastore/notification.txt")

    # Default behaviour is to only fire notification when it goes OUT OF STOCK -> IN STOCK
@@ -103,4 +103,9 @@ def test_restock_detection(client, live_server):
    set_original_response()
    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)
    assert not os.path.isfile("test-datastore/notification.txt")
    assert not os.path.isfile("test-datastore/notification.txt"), "No notification should have fired when it went OUT OF STOCK by default"

    # BUT we should see that it correctly shows "not in stock"
    res = client.get(url_for("index"))
    assert b'not-in-stock' in res.data, "Correctly showing NOT IN STOCK in the list after it changed from IN STOCK"
@@ -100,6 +100,12 @@ def test_setup_group_tag(client, live_server):
    assert b'Should be only this' in res.data
    assert b'And never this' not in res.data

    res = client.get(
        url_for("edit_page", uuid="first"),
        follow_redirects=True
    )
    # 2307 the UI notice should appear in the placeholder
    assert b'WARNING: Watch has tag/groups set with special filters' in res.data

    # RSS Group tag filter
    # An extra one that should be excluded
@@ -321,3 +327,154 @@ def test_clone_tag_on_quickwatchform_add(client, live_server):

    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
    assert b'All tags deleted' in res.data

def test_order_of_filters_tag_filter_and_watch_filter(client, live_server):

    # Add a tag with some config, import a tag and it should roughly work
    res = client.post(
        url_for("tags.form_tag_add"),
        data={"name": "test-tag-keep-order"},
        follow_redirects=True
    )
    assert b"Tag added" in res.data
    assert b"test-tag-keep-order" in res.data
    tag_filters = [
        '#only-this',  # duplicated filters
        '#only-this',
        '#only-this',
        '#only-this',
    ]

    res = client.post(
        url_for("tags.form_tag_edit_submit", uuid="first"),
        data={"name": "test-tag-keep-order",
              "include_filters": '\n'.join(tag_filters) },
        follow_redirects=True
    )
    assert b"Updated" in res.data
    tag_uuid = get_UUID_for_tag_name(client, name="test-tag-keep-order")
    res = client.get(
        url_for("tags.form_tag_edit", uuid="first")
    )
    assert b"#only-this" in res.data


    d = """<html>
<body>
Some initial text<br>
<p id="only-this">And 1 this</p>
<br>
<p id="not-this">And 2 this</p>
<p id="">And 3 this</p><!--/html/body/p[3]/-->
<p id="">And 4 this</p><!--/html/body/p[4]/-->
<p id="">And 5 this</p><!--/html/body/p[5]/-->
<p id="">And 6 this</p><!--/html/body/p[6]/-->
<p id="">And 7 this</p><!--/html/body/p[7]/-->
<p id="">And 8 this</p><!--/html/body/p[8]/-->
<p id="">And 9 this</p><!--/html/body/p[9]/-->
<p id="">And 10 this</p><!--/html/body/p[10]/-->
<p id="">And 11 this</p><!--/html/body/p[11]/-->
<p id="">And 12 this</p><!--/html/body/p[12]/-->
<p id="">And 13 this</p><!--/html/body/p[13]/-->
<p id="">And 14 this</p><!--/html/body/p[14]/-->
<p id="not-this">And 15 this</p><!--/html/body/p[15]/-->
</body>
</html>
"""

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(d)

    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    filters = [
        '/html/body/p[3]',
        '/html/body/p[4]',
        '/html/body/p[5]',
        '/html/body/p[6]',
        '/html/body/p[7]',
        '/html/body/p[8]',
        '/html/body/p[9]',
        '/html/body/p[10]',
        '/html/body/p[11]',
        '/html/body/p[12]',
        '/html/body/p[13]',  # duplicated filters
        '/html/body/p[13]',
        '/html/body/p[13]',
        '/html/body/p[13]',
        '/html/body/p[13]',
        '/html/body/p[14]',
    ]

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": '\n'.join(filters),
              "url": test_url,
              "tags": "test-tag-keep-order",
              "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    wait_for_all_checks(client)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"And 1 this" in res.data  # test-tag-keep-order

    a_tag_filter_check = b'And 1 this'  # '#only-this' of tag_filters
    # check there is no duplication of tag_filters
    assert res.data.count(a_tag_filter_check) == 1, f"duplicated filters were not removed, {res.data.count(a_tag_filter_check)} of {a_tag_filter_check} in {res.data=}"

    a_filter_check = b"And 13 this"  # '/html/body/p[13]'
    # check there is no duplication of filters
    assert res.data.count(a_filter_check) == 1, f"duplicated filters were not removed. {res.data.count(a_filter_check)} of {a_filter_check} in {res.data=}"

    a_filter_check_not_include = b"And 2 this"  # '/html/body/p[2]'
    assert a_filter_check_not_include not in res.data

    checklist = [
        b"And 3 this",
        b"And 4 this",
        b"And 5 this",
        b"And 6 this",
        b"And 7 this",
        b"And 8 this",
        b"And 9 this",
        b"And 10 this",
        b"And 11 this",
        b"And 12 this",
        b"And 13 this",
        b"And 14 this",
        b"And 1 this",  # result of filter from tag.
    ]
    # check whether everything a user requested is there
    for test in checklist:
        assert test in res.data

    # check whether everything a user requested appears in the order of the filters.
    n = 0
    for test in checklist:
        t_index = res.data[n:].find(test)
        # find() returns -1 if the text is not found.
        assert t_index >= 0, f"""failed because {test=} not in {res.data[n:]=}
#####################
Looks like some feature changed the order of the filter results.
#####################
{test} appeared earlier: {test in res.data[:n]=}
{res.data[:n]=}
"""
        n += t_index + len(test)

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data
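The behaviour this test pins down (filters merged from the watch and its tag, duplicates dropped, order preserved, with the tag's filter result appearing after the watch's own in the checklist above) amounts to an order-preserving de-duplication. A minimal sketch of that idea, not the project's actual implementation:

def merge_filters(watch_filters, tag_filters):
    # keep only the first occurrence of each filter, preserving entry order
    seen = set()
    merged = []
    for f in list(watch_filters) + list(tag_filters):
        if f and f not in seen:
            seen.add(f)
            merged.append(f)
    return merged

print(merge_filters(['/html/body/p[3]', '/html/body/p[3]'], ['#only-this', '#only-this']))
# -> ['/html/body/p[3]', '#only-this']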
@@ -29,7 +29,8 @@ def test_fetch_pdf(client, live_server):
        follow_redirects=True
    )

    assert b'PDF-1.5' not in res.data
    # PDF header should not be there (it was converted to text)
    assert b'PDF' not in res.data[:10]
    assert b'hello world' in res.data

    # So we know if the file changes in other ways
@@ -1,4 +1,4 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-

import time
from flask import url_for
@@ -255,6 +255,69 @@ def test_xpath23_prefix_validation(client, live_server):
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

def test_xpath1_lxml(client, live_server):
    #live_server_setup(live_server)

    d = '''<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
<channel>
<title>rpilocator.com</title>
<link>https://rpilocator.com</link>
<description>Find Raspberry Pi Computers in Stock</description>
<lastBuildDate>Thu, 19 May 2022 23:27:30 GMT</lastBuildDate>
<image>
<url>https://rpilocator.com/favicon.png</url>
<title>rpilocator.com</title>
<link>https://rpilocator.com/</link>
<width>32</width>
<height>32</height>
</image>
<item>
<title>Stock Alert (UK): RPi CM4</title>
<foo>something else unrelated</foo>
</item>
<item>
<title>Stock Alert (UK): Big monitorěěěě</title>
<foo>something else unrelated</foo>
</item>
</channel>
</rss>'''.encode('utf-8')

    with open("test-datastore/endpoint-content.txt", "wb") as f:
        f.write(d)


    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "xpath1://title/text()", "url": test_url, "tags": "", "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )

    ##### #2312
    wait_for_all_checks(client)
    res = client.get(url_for("index"))
    assert b'_ElementStringResult' not in res.data  # tested with 5.1.1 when it was removed and 5.1.0
    assert b'Exception' not in res.data
    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"rpilocator.com" in res.data  # in selector
    assert "Stock Alert (UK): Big monitorěěěě".encode('utf-8') in res.data  # not in selector

    #####


def test_xpath1_validation(client, live_server):
    # Add our URL to the import page
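A hedged sketch of what test_xpath1_lxml guards against: the xpath1: prefix routes the filter through lxml's native XPath 1.0 engine, whose string results could surface as the private _ElementStringResult type before lxml 5.1.1 removed it; on current releases they are lxml's "smart string" str subclass, and that private class name must never leak into the rendered page. Illustrative only:

from lxml import etree

doc = etree.fromstring(b"<rss><channel><title>rpilocator.com</title></channel></rss>")
titles = doc.xpath("//title/text()")
print(titles[0], type(titles[0]))   # rpilocator.com, an lxml "smart string" (str subclass)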
@@ -242,5 +242,28 @@ def live_server_setup(live_server):
        resp.headers['Content-Type'] = 'application/pdf'
        return resp

    @live_server.app.route('/test-interactive-html-endpoint')
    def test_interactive_html_endpoint():
        header_text=""
        for k,v in request.headers.items():
            header_text += f"{k}: {v}<br>"

        resp = make_response(f"""
        <html>
        <body>
        Primitive JS check for <pre>changedetectionio/tests/visualselector/test_fetch_data.py</pre>
        <p id="remove">This text should be removed</p>
        <form onsubmit="event.preventDefault();">
        <!-- obfuscated text so that we don't accidentally get a false positive due to conversion of the source :) -->
        <button name="test-button" onclick="getElementById('remove').remove();getElementById('some-content').innerHTML = atob('SSBzbWVsbCBKYXZhU2NyaXB0IGJlY2F1c2UgdGhlIGJ1dHRvbiB3YXMgcHJlc3NlZCE=')">Click here</button>
        <div id=some-content></div>
        <pre>
        {header_text.lower()}
        </pre>
        </body>
        </html>""", 200)
        resp.headers['Content-Type'] = 'text/html'
        return resp

    live_server.start()
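For reference, the base64 blob wired to the button decodes to exactly the phrase the tests assert on; a one-liner to verify it:

import base64
print(base64.b64decode('SSBzbWVsbCBKYXZhU2NyaXB0IGJlY2F1c2UgdGhlIGJ1dHRvbiB3YXMgcHJlc3NlZCE=').decode())
# -> I smell JavaScript because the button was pressed!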
@@ -7,15 +7,19 @@ from ..util import live_server_setup, wait_for_all_checks, extract_UUID_from_cli
def test_setup(client, live_server):
    live_server_setup(live_server)


# Add a site in paused mode, add an invalid filter, we should still have visual selector data ready
def test_visual_selector_content_ready(client, live_server):

    import os
    import json

    assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"

    # Add our URL to the import page, because the docker container (playwright/selenium) won't be able to connect to our usual test url
    test_url = "https://changedetection.io/ci-test/test-runjs.html"
    test_url = url_for('test_interactive_html_endpoint', _external=True)
    test_url = test_url.replace('localhost.localdomain', 'cdio')
    test_url = test_url.replace('localhost', 'cdio')

    res = client.post(
        url_for("form_quick_watch_add"),
@@ -23,28 +27,31 @@ def test_visual_selector_content_ready(client, live_server):
        follow_redirects=True
    )
    assert b"Watch added in Paused state, saving will unpause" in res.data

    uuid = extract_UUID_from_client(client)
    res = client.post(
        url_for("edit_page", uuid="first", unpause_on_save=1),
        url_for("edit_page", uuid=uuid, unpause_on_save=1),
        data={
            "url": test_url,
            "tags": "",
            "headers": "",
            'fetch_backend': "html_webdriver",
            'webdriver_js_execute_code': 'document.querySelector("button[name=test-button]").click();'
            "url": test_url,
            "tags": "",
            # For now, cookies don't work in headers because it must be a full cookiejar object
            'headers': "testheader: yes\buser-agent: MyCustomAgent",
            'fetch_backend': "html_webdriver",
        },
        follow_redirects=True
    )
    assert b"unpaused" in res.data
    wait_for_all_checks(client)
    uuid = extract_UUID_from_client(client)

    # Check the JS execute code before extract worked

    assert live_server.app.config['DATASTORE'].data['watching'][uuid].history_n >= 1, "Watch history had at least 1 (everything fetched OK)"

    res = client.get(
        url_for("preview_page", uuid="first"),
        url_for("preview_page", uuid=uuid),
        follow_redirects=True
    )
    assert b'I smell JavaScript' in res.data
    assert b"testheader: yes" in res.data
    assert b"user-agent: mycustomagent" in res.data


    assert os.path.isfile(os.path.join('test-datastore', uuid, 'last-screenshot.png')), "last-screenshot.png should exist"
    assert os.path.isfile(os.path.join('test-datastore', uuid, 'elements.json')), "xpath elements.json data should exist"
@@ -74,30 +81,33 @@ def test_visual_selector_content_ready(client, live_server):

def test_basic_browserstep(client, live_server):

    assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"
    #live_server_setup(live_server)
    assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"

    # Add our URL to the import page, because the docker container (playwright/selenium) won't be able to connect to our usual test url
    test_url = "https://changedetection.io/ci-test/test-runjs.html"
    test_url = url_for('test_interactive_html_endpoint', _external=True)
    test_url = test_url.replace('localhost.localdomain', 'cdio')
    test_url = test_url.replace('localhost', 'cdio')

    res = client.post(
        url_for("form_quick_watch_add"),
        data={"url": test_url, "tags": '', 'edit_and_watch_submit_button': 'Edit > Watch'},
        follow_redirects=True
    )

    assert b"Watch added in Paused state, saving will unpause" in res.data

    res = client.post(
        url_for("edit_page", uuid="first", unpause_on_save=1),
        data={
            "url": test_url,
            "tags": "",
            "headers": "",
            'fetch_backend': "html_webdriver",
            'browser_steps-0-operation': 'Goto site',
            'browser_steps-1-operation': 'Click element',
            'browser_steps-1-selector': 'button[name=test-button]',
            'browser_steps-1-optional_value': ''
            "url": test_url,
            "tags": "",
            'fetch_backend': "html_webdriver",
            'browser_steps-0-operation': 'Goto site',
            'browser_steps-1-operation': 'Click element',
            'browser_steps-1-selector': 'button[name=test-button]',
            'browser_steps-1-optional_value': '',
            # For now, cookies don't work in headers because it must be a full cookiejar object
            'headers': "testheader: yes\buser-agent: MyCustomAgent",
        },
        follow_redirects=True
    )
@@ -105,6 +115,9 @@ def test_basic_browserstep(client, live_server):
    wait_for_all_checks(client)

    uuid = extract_UUID_from_client(client)
    assert live_server.app.config['DATASTORE'].data['watching'][uuid].history_n >= 1, "Watch history had at least 1 (everything fetched OK)"

    assert b"This text should be removed" not in res.data

    # Check HTML conversion detected and worked
    res = client.get(
@@ -114,13 +127,19 @@ def test_basic_browserstep(client, live_server):
    assert b"This text should be removed" not in res.data
    assert b"I smell JavaScript because the button was pressed" in res.data

    assert b"testheader: yes" in res.data
    assert b"user-agent: mycustomagent" in res.data

    four_o_four_url = url_for('test_endpoint', status_code=404, _external=True)
    four_o_four_url = four_o_four_url.replace('localhost.localdomain', 'cdio')
    four_o_four_url = four_o_four_url.replace('localhost', 'cdio')

    # now test for 404 errors
    res = client.post(
        url_for("edit_page", uuid=uuid, unpause_on_save=1),
        data={
            "url": "https://changedetection.io/404",
            "url": four_o_four_url,
            "tags": "",
            "headers": "",
            'fetch_backend': "html_webdriver",
            'browser_steps-0-operation': 'Goto site',
            'browser_steps-1-operation': 'Click element',
@@ -369,6 +369,12 @@ class update_worker(threading.Thread):
                            }
                        )
                        process_changedetection_results = False
                    except content_fetchers.exceptions.BrowserFetchTimedOut as e:
                        self.datastore.update_watch(uuid=uuid,
                                                    update_obj={'last_error': e.msg
                                                                }
                                                    )
                        process_changedetection_results = False
                    except content_fetchers.exceptions.BrowserStepsStepException as e:

                        if not self.datastore.data['watching'].get(uuid):
@@ -456,7 +462,7 @@ class update_worker(threading.Thread):
                    except Exception as e:
                        logger.error(f"Exception reached processing watch UUID: {uuid}")
                        logger.error(str(e))
                        self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
                        self.datastore.update_watch(uuid=uuid, update_obj={'last_error': "Exception: " + str(e)})
                        # Other serious error
                        process_changedetection_results = False
                        # import traceback
docs/chrome-extension-screenshot.png (new binary file, 125 KiB; binary content not shown)
@@ -1,7 +1,7 @@
# Used by Pyppeteer
pyee

eventlet>=0.33.3 # related to dnspython fixes
eventlet==0.33.3 # related to dnspython fixes
feedgen~=0.9
flask-compress
# 0.6.3 included compatibility fix for werkzeug 3.x (2.x had deprecation of url handlers)
@@ -9,6 +9,7 @@ flask-login>=0.6.3
flask-paginate
flask_expects_json~=1.7
flask_restful
flask_cors # For the Chrome extension to operate
flask_wtf~=1.2
flask~=2.3
inscriptis~=2.2
@@ -22,21 +23,25 @@ validators~=0.21
brotli~=1.0
requests[socks]

urllib3>1.26
urllib3==1.26.18
chardet>2.3.0

wtforms~=3.0
jsonpath-ng~=1.5.3

dnspython~=2.4 # related to eventlet fixes
# Pinned: module 'eventlet.green.select' has no attribute 'epoll'
# https://github.com/eventlet/eventlet/issues/805#issuecomment-1640463482
dnspython==2.3.0 # related to eventlet fixes

# jq not available on Windows so must be installed manually

# Notification library
apprise~=1.7.1
apprise~=1.7.4

# apprise mqtt https://github.com/dgtlmoon/changedetection.io/issues/315
paho-mqtt
# and 2.0.0 https://github.com/dgtlmoon/changedetection.io/issues/2241 not yet compatible
# use v1.x due to https://github.com/eclipse/paho.mqtt.python/issues/814
paho-mqtt>=1.6.1,<2.0.0
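A hedged illustration of why the <2.0.0 cap was added: paho-mqtt 2.0 made the callback API version an explicit constructor argument, so 1.x-style client construction (the style the pinned releases and the linked issues assume, including the apprise mqtt:// plugin path) no longer works unchanged:

import paho.mqtt.client as mqtt

client = mqtt.Client()  # fine on 1.6.x; on 2.0 this call needs an explicit CallbackAPIVersion argument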
# This mainly affects some ARM builds, which unlike the other builds ignores "ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1"
# so without this pinning, the newer versions on ARM will forcefully try to build rust, which results in "rust compiler not found"
@@ -47,7 +52,7 @@ cryptography~=3.4
beautifulsoup4

# XPath filtering, lxml is required by bs4 anyway, but put it here to be safe.
lxml
lxml >=4.8.0,<6

# XPath 2.0-3.1 support - 4.2.0 broke something?
elementpath==4.1.5
@@ -70,7 +75,7 @@ pillow
# playwright is installed at Dockerfile build time because it's not available on all platforms

# experimental release
pyppeteer-ng==2.0.0rc2
pyppeteer-ng==2.0.0rc5

# Include pytest, so if there's a support issue we can ask them to run these tests on their setup
pytest ~=7.2