Compare commits


28 Commits

Author SHA1 Message Date
dgtlmoon 575df972a4 Merge branch 'master' into 2118-fix-missing-arg 2024-02-06 14:49:34 +01:00
dgtlmoon 72c7645f60 Fix - Pinning elementpath xPath filter library to 4.1.5 (#2164) 2024-02-06 14:49:10 +01:00
dgtlmoon f1babbac33 Re #2118 fix missing default var 2024-02-06 13:05:21 +01:00
dgtlmoon e09eb47fb7 Restock detection - Update stock-not-in-stock.js (NL) 2024-02-03 22:32:39 +01:00
dgtlmoon 616c0b3f65 New text filter - Sort text alphabetically filter (#2153) 2024-02-02 11:36:58 +01:00
dgtlmoon c90b27823a Filtering - include_filters in group and watch settings should not duplicate (#2151 #1845) 2024-02-02 09:30:01 +01:00
dgtlmoon 3b16b19a94 Record notification count and show in [stats] tab (#2150) 2024-02-02 09:12:44 +01:00
Antonio Neri 4ee9fa79e1 Restock - Update stock-not-in-stock.js Italian translation (#2149): Added `prodotto esaurito` (Italian for out of stock) 2024-02-01 16:35:31 +01:00
dgtlmoon 4b49759113 UI - Show error/warning when trying to compare the same version 2024-02-01 10:36:43 +01:00
dgtlmoon e9a9790cb0 Fetching - Make an obvious error when using BrowserSteps with the simple text fetcher (#2145) 2024-02-01 00:09:27 +01:00
dgtlmoon 593660e2f6 Fix for switching to price-data-follower mode (when page has JSON price data), only needs to be queued once. Re #1565 2024-01-31 22:39:24 +01:00
dgtlmoon 7d96b4ba83 Fetching - Always record server software reply headers (will be used in the future) (#2143) 2024-01-31 16:15:43 +01:00
dgtlmoon fca40e4d5b Testing - General test workflow improvements (#2144) 2024-01-31 15:10:44 +01:00
dgtlmoon 66e2dfcead RSS - Include link to the watched URL in the feed (#2139 #2131 and #327) 2024-01-29 16:26:14 +01:00
dgtlmoon bce7eb68fb Notifications - skip empty notification URLs from being processed (#2138) 2024-01-29 14:20:39 +01:00
dgtlmoon 93c0385119 UI - Filters & Triggers - Adding example for keyword matching in a line 2024-01-29 14:18:14 +01:00
dgtlmoon e17f3be739 RSS - Adding performance stats 2024-01-29 13:05:11 +01:00
dgtlmoon 3a9f79b756 Notification - logging - adding performance information for processing time of notifications #327 2024-01-29 12:55:08 +01:00
dgtlmoon 1f5670253e UI - Adding icon to show which watch has Browser Steps enabled (#2137) 2024-01-29 12:36:53 +01:00
dgtlmoon fe3cf5ffd2 Logging - Adding extra debug logging to change detection (#2136) 2024-01-29 11:21:21 +01:00
dgtlmoon d31a45d49a Fetcher - Improve status_code logging (#2130 #2122) 2024-01-25 09:53:00 +01:00
Conner 19ee65361d Notifications - Bugfix: Notification format not being set correct (HTML emails being sent as plaintext and other problems) (#2129) 2024-01-24 20:45:45 +01:00
dgtlmoon 677082723c Restock tweaks - use a single regex, tidy up height detection (#2125) 2024-01-23 13:31:05 +01:00
dgtlmoon 96793890f8 Notification - Templates - Adding an example of how to use URL encoding with tokens 2024-01-22 12:20:50 +01:00
dgtlmoon 0439155127 Notification - Templates - Adding an example of how to use |tojson for JSON payloads 2024-01-22 11:06:55 +01:00
dependabot[bot] 29ca2521eb Build maintenance - dependabot - Bump the all build helpers (#2121) 2024-01-20 11:47:14 +01:00
Andrew Peabody 7d67ad057c Enable dependabot for github-actions (#2119) 2024-01-20 10:03:24 +01:00
dgtlmoon 2e88872b7e Update docker-compose.yml 2024-01-19 23:14:55 +01:00
32 changed files with 339 additions and 91 deletions

.github/dependabot.yml (vendored, new file, +10 lines)
View File

@@ -0,0 +1,10 @@
version: 2
updates:
- package-ecosystem: github-actions
directory: /
schedule:
interval: "weekly"
groups:
all:
patterns:
- "*"

View File

@@ -34,7 +34,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -45,7 +45,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@@ -59,4 +59,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v3

View File

@@ -41,7 +41,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: 3.11

View File

@@ -9,7 +9,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Install pypa/build
@@ -21,7 +21,7 @@ jobs:
- name: Build a binary wheel and a source tarball
run: python3 -m build
- name: Store the distribution packages
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: python-package-distributions
path: dist/
@@ -34,18 +34,18 @@ jobs:
- build
steps:
- name: Download all the dists
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: python-package-distributions
path: dist/
- name: Test that the basic pip built package runs without error
run: |
set -e
set -ex
pip3 install dist/changedetection.io*.whl
changedetection.io -d /tmp -p 10000 &
sleep 3
curl http://127.0.0.1:10000/static/styles/pure-min.css >/dev/null
curl http://127.0.0.1:10000/ >/dev/null
curl --retry-connrefused --retry 6 http://127.0.0.1:10000/static/styles/pure-min.css >/dev/null
curl --retry-connrefused --retry 6 http://127.0.0.1:10000/ >/dev/null
killall changedetection.io
@@ -64,7 +64,7 @@ jobs:
steps:
- name: Download all the dists
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: python-package-distributions
path: dist/

View File

@@ -26,7 +26,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: 3.11

View File

@@ -11,7 +11,7 @@ jobs:
# Mainly just for link/flake8
- name: Set up Python 3.11
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: '3.11'
@@ -27,7 +27,7 @@ jobs:
run: |
docker network create changedet-network
# Selenium+browserless
docker run --network changedet-network -d --hostname selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome:4
docker run --network changedet-network -d --name browserless --hostname browserless -e "FUNCTION_BUILT_INS=[\"fs\",\"crypto\"]" -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.60-chrome-stable
@@ -47,7 +47,7 @@ jobs:
# Debug SMTP server/echo message back server
docker run --network changedet-network -d -p 11025:11025 -p 11080:11080 --hostname mailserver test-changedetectionio bash -c 'python changedetectionio/tests/smtp/smtp-test-server.py'
- name: Test built container with pytest
- name: Test built container with Pytest (generally as requests/plaintext fetching)
run: |
# Unit tests
echo "run test with unittest"
@@ -61,20 +61,32 @@ jobs:
# append the docker option. e.g. '-e LOGGER_LEVEL=DEBUG'
docker run --network changedet-network test-changedetectionio bash -c 'cd changedetectionio && ./run_basic_tests.sh'
- name: Test built container selenium+browserless/playwright
- name: Specific tests in built container for Selenium
run: |
# Selenium fetch
docker run --rm -e "WEBDRIVER_URL=http://selenium:4444/wd/hub" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py'
- name: Specific tests in built container for Playwright
run: |
# Playwright/Browserless fetch
docker run --rm -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest tests/fetchers/test_content.py && pytest tests/test_errorhandling.py && pytest tests/visualselector/test_fetch_data.py'
- name: Specific tests in built container for headers and requests checks with Playwright
run: |
# Settings headers playwright tests - Call back in from Browserless, check headers
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
- name: Specific tests in built container for headers and requests checks with Selenium
run: |
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "WEBDRIVER_URL=http://selenium:4444/wd/hub" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
- name: Specific tests in built container with Playwright as Puppeteer experimental fetcher
run: |
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "USE_EXPERIMENTAL_PUPPETEER_FETCH=yes" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
- name: Test built container restock detection via Playwright
run: |
# restock detection via playwright - added name=changedet here so that playwright/browserless can connect to it
docker run --rm --name "changedet" -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://browserless:3000" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio;pytest --live-server-port=5004 --live-server-host=0.0.0.0 tests/restock/test_restock.py'
@@ -106,10 +118,10 @@ jobs:
docker run --name test-changedetectionio -p 5556:5000 -d test-changedetectionio
sleep 3
# Should return 0 (no error) when grep finds it
curl -s http://localhost:5556 |grep -q checkbox-uuid
curl --retry-connrefused --retry 6 -s http://localhost:5556 |grep -q checkbox-uuid
# and IPv6
curl -s -g -6 "http://[::1]:5556"|grep -q checkbox-uuid
curl --retry-connrefused --retry 6 -s -g -6 "http://[::1]:5556"|grep -q checkbox-uuid
# Check whether TRACE log is enabled.
# Also, check whether TRACE came from STDERR

View File

@@ -18,8 +18,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q: PriorityQueue
def accept(uuid):
datastore.data['watching'][uuid]['track_ldjson_price_data'] = PRICE_DATA_TRACK_ACCEPT
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
return redirect(url_for("form_watch_checknow", uuid=uuid))
return redirect(url_for("index"))
@login_required
@price_data_follower_blueprint.route("/<string:uuid>/reject", methods=['GET'])

View File

@@ -51,8 +51,9 @@ class BrowserStepsStepException(Exception):
return
# @todo - make base Exception class that announces via logger()
class PageUnloadable(Exception):
def __init__(self, status_code, url, message, screenshot=False):
def __init__(self, status_code=None, url='', message='', screenshot=False):
# Set this so we can use it in other parts of the app
self.status_code = status_code
self.url = url
@@ -60,6 +61,10 @@ class PageUnloadable(Exception):
self.message = message
return
class BrowserStepsInUnsupportedFetcher(Exception):
def __init__(self, url):
self.url = url
return
class EmptyReply(Exception):
def __init__(self, status_code, url, screenshot=None):
@@ -389,10 +394,24 @@ class base_html_playwright(Fetcher):
raise PageUnloadable(url=url, status_code=None, message=f"Timed out connecting to browserless, retrying..")
else:
# 200 Here means that the communication to browserless worked only, not the page state
if response.status_code == 200:
try:
x = response.json()
except Exception as e:
raise PageUnloadable(url=url, message="Error reading JSON response from browserless")
try:
self.status_code = response.status_code
except Exception as e:
raise PageUnloadable(url=url, message="Error reading status_code code response from browserless")
self.headers = x.get('headers')
if self.status_code != 200 and not ignore_status_codes:
raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, page_html=x.get('content',''))
if self.status_code == 200:
import base64
x = response.json()
if not x.get('screenshot'):
# https://github.com/puppeteer/puppeteer/blob/v1.0.0/docs/troubleshooting.md#tips
# https://github.com/puppeteer/puppeteer/issues/1834
@@ -403,16 +422,10 @@ class base_html_playwright(Fetcher):
if not x.get('content', '').strip():
raise EmptyReply(url=url, status_code=None)
if x.get('status_code', 200) != 200 and not ignore_status_codes:
raise Non200ErrorCodeReceived(url=url, status_code=x.get('status_code', 200), page_html=x['content'])
self.content = x.get('content')
self.headers = x.get('headers')
self.instock_data = x.get('instock_data')
self.screenshot = base64.b64decode(x.get('screenshot'))
self.status_code = x.get('status_code')
self.xpath_data = x.get('xpath_data')
else:
# Some other error from browserless
raise PageUnloadable(url=url, status_code=None, message=response.content.decode('utf-8'))
@@ -511,8 +524,13 @@ class base_html_playwright(Fetcher):
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
self.page.wait_for_timeout(extra_wait * 1000)
self.status_code = response.status
try:
self.status_code = response.status
except Exception as e:
# https://github.com/dgtlmoon/changedetection.io/discussions/2122#discussioncomment-8241962
logger.critical(f"Response from browserless/playwright did not have a status_code! Response follows.")
logger.critical(response)
raise PageUnloadable(url=url, status_code=None, message=str(e))
if self.status_code != 200 and not ignore_status_codes:
@@ -698,6 +716,9 @@ class html_requests(Fetcher):
current_include_filters=None,
is_binary=False):
if self.browser_steps_get_valid_steps():
raise BrowserStepsInUnsupportedFetcher(url=url)
# Make requests use a more modern looking user-agent
if not {k.lower(): v for k, v in request_headers.items()}.get('user-agent', None):
request_headers['User-Agent'] = os.getenv("DEFAULT_SETTINGS_HEADERS_USERAGENT",
@@ -737,6 +758,8 @@ class html_requests(Fetcher):
if encoding:
r.encoding = encoding
self.headers = r.headers
if not r.content or not len(r.content):
raise EmptyReply(url=url, status_code=r.status_code)
@@ -753,7 +776,7 @@ class html_requests(Fetcher):
else:
self.content = r.text
self.headers = r.headers
self.raw_content = r.content
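
For context on the new BrowserStepsInUnsupportedFetcher guard above: the plain requests-based fetcher now refuses to run, before making any network call, if the watch has real Browser Steps configured. The sketch below is illustrative only; the exception name and the 'Choose one'/'Goto site' placeholder values come from this diff, while the stripped-down fetcher class and the sample usage are invented.

# Illustrative sketch, not the project's actual fetcher class.
class BrowserStepsInUnsupportedFetcher(Exception):
    def __init__(self, url):
        self.url = url

class PlainTextFetcherSketch:
    def __init__(self, browser_steps=None):
        self.browser_steps = browser_steps or []

    def browser_steps_get_valid_steps(self):
        # A step only counts when an operation other than the placeholder entries is chosen
        return [s for s in self.browser_steps
                if s.get('operation') and s['operation'] not in ('Choose one', 'Goto site')]

    def run(self, url):
        # Raise early: a plain HTTP client cannot drive a browser
        if self.browser_steps_get_valid_steps():
            raise BrowserStepsInUnsupportedFetcher(url=url)
        # ... the normal requests-based fetch would continue here

fetcher = PlainTextFetcherSketch(browser_steps=[{'operation': 'Click element'}])
try:
    fetcher.run('https://example.com')
except BrowserStepsInUnsupportedFetcher as e:
    print(f"Refusing to fetch {e.url} with the plain text fetcher")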

View File

@@ -13,7 +13,6 @@ from threading import Event
import datetime
import flask_login
from loguru import logger
import sys
import os
import pytz
import queue
@@ -317,6 +316,9 @@ def changedetection_app(config=None, datastore_o=None):
@app.route("/rss", methods=['GET'])
def rss():
from jinja2 import Environment, BaseLoader
jinja2_env = Environment(loader=BaseLoader)
now = time.time()
# Always requires token set
app_rss_token = datastore.data['settings']['application'].get('rss_access_token')
rss_url_token = request.args.get('token')
@@ -380,8 +382,12 @@ def changedetection_app(config=None, datastore_o=None):
include_equal=False,
line_feed_sep="<br>")
fe.content(content="<html><body><h4>{}</h4>{}</body></html>".format(watch_title, html_diff),
type='CDATA')
# @todo Make this configurable and also consider html-colored markup
# @todo User could decide if <link> goes to the diff page, or to the watch link
rss_template = "<html><body>\n<h4><a href=\"{{watch_url}}\">{{watch_title}}</a></h4>\n<p>{{html_diff}}</p>\n</body></html>\n"
content = jinja2_env.from_string(rss_template).render(watch_title=watch_title, html_diff=html_diff, watch_url=watch.link)
fe.content(content=content, type='CDATA')
fe.guid(guid, permalink=False)
dt = datetime.datetime.fromtimestamp(int(watch.newest_history_key))
@@ -390,6 +396,7 @@ def changedetection_app(config=None, datastore_o=None):
response = make_response(fg.rss_str())
response.headers.set('Content-Type', 'application/rss+xml;charset=utf-8')
logger.trace(f"RSS generated in {time.time() - now:.3f}s")
return response
@app.route("/", methods=['GET'])
@@ -1603,7 +1610,7 @@ def notification_runner():
n_object['notification_title'] = datastore.data['settings']['application'].get('notification_title')
if not n_object.get('notification_format') and datastore.data['settings']['application'].get('notification_format'):
n_object['notification_title'] = datastore.data['settings']['application'].get('notification_format')
n_object['notification_format'] = datastore.data['settings']['application'].get('notification_format')
sent_obj = notification.process_notification(n_object, datastore)
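
The RSS change above swaps a hard-coded format string for a small Jinja2 string template that also carries the watch link. A minimal standalone sketch of that rendering step (the template string is taken from the diff; the sample values are invented placeholders):

from jinja2 import Environment, BaseLoader

rss_template = "<html><body>\n<h4><a href=\"{{watch_url}}\">{{watch_title}}</a></h4>\n<p>{{html_diff}}</p>\n</body></html>\n"

jinja2_env = Environment(loader=BaseLoader)
content = jinja2_env.from_string(rss_template).render(
    watch_title="Example watch",                  # placeholder value
    html_diff="<span>something changed</span>",   # already-rendered HTML diff
    watch_url="https://example.com/page",         # placeholder value
)
print(content)  # in the real code this string is passed to fe.content(content=content, type='CDATA')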

View File

@@ -465,6 +465,7 @@ class watchForm(commonSettingsForm):
method = SelectField('Request method', choices=valid_method, default=default_method)
ignore_status_codes = BooleanField('Ignore status codes (process non-2xx status codes as normal)', default=False)
check_unique_lines = BooleanField('Only trigger when unique lines appear', default=False)
sort_text_alphabetically = BooleanField('Sort text alphabetically', default=False)
filter_text_added = BooleanField('Added lines', default=True)
filter_text_replaced = BooleanField('Replaced/changed lines', default=True)

View File

@@ -45,6 +45,7 @@ base_config = {
'last_error': False,
'last_viewed': 0, # history key value of the last viewed via the [diff] link
'method': 'GET',
'notification_alert_count': 0,
# Custom notification content
'notification_body': None,
'notification_format': default_notification_format_for_watch,
@@ -56,6 +57,8 @@ base_config = {
'previous_md5': False,
'previous_md5_before_filters': False, # Used for skipping changedetection entirely
'proxy': None, # Preferred proxy connection
'remote_server_reply': None, # From 'server' reply header
'sort_text_alphabetically': False,
'subtractive_selectors': [],
'tag': '', # Old system of text name for a tag, to be removed
'tags': [], # list of UUIDs to App.Tags
@@ -246,10 +249,10 @@ class model(dict):
@property
def has_browser_steps(self):
has_browser_steps = self.get('browser_steps') and list(filter(
lambda s: (s['operation'] and len(s['operation']) and s['operation'] != 'Choose one' and s['operation'] != 'Goto site'),
self.get('browser_steps')))
return has_browser_steps
# Returns the newest key, but if theres only 1 record, then it's counted as not being new, so return 0.
@property

View File

@@ -116,6 +116,9 @@ def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs):
def process_notification(n_object, datastore):
now = time.time()
if n_object.get('notification_timestamp'):
logger.trace(f"Time since queued {now-n_object['notification_timestamp']:.3f}s")
# Insert variables into the notification content
notification_parameters = create_notification_parameters(n_object, datastore)
@@ -133,6 +136,8 @@ def process_notification(n_object, datastore):
# Initially text or whatever
n_format = datastore.data['settings']['application'].get('notification_format', valid_notification_formats[default_notification_format])
logger.trace(f"Complete notification body including Jinja and placeholders calculated in {time.time() - now:.3f}s")
# https://github.com/caronc/apprise/wiki/Development_LogCapture
# Anything higher than or equal to WARNING (which covers things like Connection errors)
# raise it as an exception
@@ -147,6 +152,10 @@ def process_notification(n_object, datastore):
with apprise.LogCapture(level=apprise.logging.DEBUG) as logs:
for url in n_object['notification_urls']:
url = url.strip()
if not url:
logger.warning(f"Process Notification: skipping empty notification URL.")
continue
logger.info(">> Process Notification: AppRise notifying {}".format(url))
url = jinja2_env.from_string(url).render(**notification_parameters)

View File

@@ -1,8 +1,9 @@
import hashlib
import urllib3
from . import difference_detection_processor
from copy import deepcopy
from loguru import logger
import hashlib
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@@ -43,11 +44,13 @@ class perform_site_check(difference_detection_processor):
fetched_md5 = hashlib.md5(self.fetcher.instock_data.encode('utf-8')).hexdigest()
# 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold.
update_obj["in_stock"] = True if self.fetcher.instock_data == 'Possibly in stock' else False
logger.debug(f"Watch UUID {uuid} restock check returned '{self.fetcher.instock_data}' from JS scraper.")
else:
raise UnableToExtractRestockData(status_code=self.fetcher.status_code)
# The main thing that all this at the moment comes down to :)
changed_detected = False
logger.debug(f"Watch UUID {uuid} restock check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")
if watch.get('previous_md5') and watch.get('previous_md5') != fetched_md5:
# Yes if we only care about it going to instock, AND we are in stock
@@ -60,5 +63,4 @@ class perform_site_check(difference_detection_processor):
# Always record the new checksum
update_obj["previous_md5"] = fetched_md5
return changed_detected, update_obj, self.fetcher.instock_data.encode('utf-8').strip()

View File

@@ -6,11 +6,11 @@ import os
import re
import urllib3
from . import difference_detection_processor
from ..html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text
from changedetectionio import content_fetcher, html_tools
from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
from copy import deepcopy
from . import difference_detection_processor
from ..html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text
from loguru import logger
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@@ -116,7 +116,9 @@ class perform_site_check(difference_detection_processor):
# and then use getattr https://docs.python.org/3/reference/datamodel.html#object.__getitem__
# https://realpython.com/inherit-python-dict/ instead of doing it procedurely
include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='include_filters')
include_filters_rule = [*watch.get('include_filters', []), *include_filters_from_tags]
# 1845 - remove duplicated filters in both group and watch include filter
include_filters_rule = list({*watch.get('include_filters', []), *include_filters_from_tags})
subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='subtractive_selectors'),
*watch.get("subtractive_selectors", []),
@@ -202,6 +204,12 @@ class perform_site_check(difference_detection_processor):
is_rss=is_rss # #1874 activate the <title workaround hack
)
if watch.get('sort_text_alphabetically') and stripped_text_from_html:
# Note: Because a <p>something</p> will add an extra line feed to signify the paragraph gap
# we end up with 'Some text\n\n', sorting will add all those extra \n at the start, so we remove them here.
stripped_text_from_html = stripped_text_from_html.replace('\n\n', '\n')
stripped_text_from_html = '\n'.join( sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower() ))
# Re #340 - return the content before the 'ignore text' was applied
text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')
@@ -335,6 +343,8 @@ class perform_site_check(difference_detection_processor):
if not watch['title'] or not len(watch['title']):
update_obj['title'] = html_tools.extract_element(find='title', html_content=self.fetcher.content)
logger.debug(f"Watch UUID {uuid} content check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")
if changed_detected:
if watch.get('check_unique_lines', False):
has_unique_lines = watch.lines_contain_something_unique_compared_to_history(lines=stripped_text_from_html.splitlines())
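
Two of the behaviour changes in this processor are easy to demonstrate in isolation; a standalone sketch with invented sample data follows (the real code operates on the watch/tag settings and the extracted page text):

# #1845 - filters defined on both the watch and its group are de-duplicated via a set;
# note that a set does not preserve the original filter order.
watch_filters = ['.price', '#title']
tag_filters = ['.price', '.stock']
include_filters_rule = list({*watch_filters, *tag_filters})  # three unique filters, in arbitrary order

# #2153 - optional case-insensitive alphabetical sort of the extracted text, after collapsing
# the double line feeds that paragraph extraction leaves behind.
stripped_text_from_html = "Z last\n\n0 numerical\n\nA uppercase\n\na lowercase\n"
stripped_text_from_html = stripped_text_from_html.replace('\n\n', '\n')
stripped_text_from_html = '\n'.join(sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower()))
print(stripped_text_from_html)  # 0 numerical / a lowercase / A uppercase / Z last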

View File

@@ -1,3 +1,10 @@
// Restock Detector
// (c) Leigh Morresi dgtlmoon@gmail.com
//
// Assumes the product is in stock to begin with, unless the following appears above the fold ;
// - outOfStockTexts appears above the fold (out of stock)
// - negateOutOfStockRegex (really is in stock)
function isItemInStock() {
// @todo Pass these in so the same list can be used in non-JS fetchers
const outOfStockTexts = [
@@ -29,6 +36,7 @@ function isItemInStock() {
'nicht zur verfügung',
'niet beschikbaar',
'niet leverbaar',
'niet op voorraad',
'no disponible temporalmente',
'no longer in stock',
'no tickets available',
@@ -40,6 +48,7 @@ function isItemInStock() {
'não estamos a aceitar encomendas',
'out of stock',
'out-of-stock',
'prodotto esaurito',
'produkt niedostępny',
'sold out',
'sold-out',
@@ -56,6 +65,7 @@ function isItemInStock() {
'품절'
];
const vh = Math.max(document.documentElement.clientHeight || 0, window.innerHeight || 0);
function getElementBaseText(element) {
// .textContent can include text from children which may give the wrong results
// scan only immediate TEXT_NODEs, which will be a child of the element
@@ -66,19 +76,13 @@ function isItemInStock() {
return text.toLowerCase().trim();
}
const negateOutOfStockRegexs = [
'[0-9] in stock'
]
var negateOutOfStockRegexs_r = [];
for (let i = 0; i < negateOutOfStockRegexs.length; i++) {
negateOutOfStockRegexs_r.push(new RegExp(negateOutOfStockRegexs[0], 'g'));
}
const negateOutOfStockRegex = new RegExp('([0-9] in stock|add to cart)', 'ig');
// The out-of-stock or in-stock-text is generally always above-the-fold
// and often below-the-fold is a list of related products that may or may not contain trigger text
// so it's good to filter to just the 'above the fold' elements
// and it should be at least 100px from the top to ignore items in the toolbar, sometimes menu items like "Coming soon" exist
const elementsToScan = Array.from(document.getElementsByTagName('*')).filter(element => element.getBoundingClientRect().top + window.scrollY <= window.innerHeight && element.getBoundingClientRect().top + window.scrollY >= 100);
const elementsToScan = Array.from(document.getElementsByTagName('*')).filter(element => element.getBoundingClientRect().top + window.scrollY <= vh && element.getBoundingClientRect().top + window.scrollY >= 100);
var elementText = "";
@@ -94,10 +98,8 @@ function isItemInStock() {
if (elementText.length) {
// try which ones could mean its in stock
for (let i = 0; i < negateOutOfStockRegexs.length; i++) {
if (negateOutOfStockRegexs_r[i].test(elementText)) {
return 'Possibly in stock';
}
if (negateOutOfStockRegex.test(elementText)) {
return 'Possibly in stock';
}
}
}
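
The restock-script change above collapses the per-pattern loop (which always compiled index [0] by mistake) into one case-insensitive regex that also treats 'add to cart' as an in-stock signal. The same check, sketched in Python purely for illustration (the pattern comes from the diff, the sample text is invented):

import re

negate_out_of_stock = re.compile(r'([0-9] in stock|add to cart)', re.IGNORECASE)

element_text = "only 3 in stock. add to cart"
if negate_out_of_stock.search(element_text):
    print('Possibly in stock')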

View File

@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
aria-hidden="true"
viewBox="0 0 19.966091 17.999964"
class="css-1oqmxjn"
version="1.1"
id="svg4"
sodipodi:docname="steps.svg"
width="19.966091"
height="17.999964"
inkscape:version="1.1.2 (0a00cf5339, 2022-02-04)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs8" />
<sodipodi:namedview
id="namedview6"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
showgrid="false"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0"
inkscape:zoom="8.6354167"
inkscape:cx="-1.3896261"
inkscape:cy="6.1375151"
inkscape:window-width="1280"
inkscape:window-height="667"
inkscape:window-x="2419"
inkscape:window-y="250"
inkscape:window-maximized="0"
inkscape:current-layer="svg4" />
<path
d="m 16.95807,12.000003 c -0.7076,0.0019 -1.3917,0.2538 -1.9316,0.7113 -0.5398,0.4575 -0.9005,1.091 -1.0184,1.7887 H 5.60804 c -0.80847,0.0297 -1.60693,-0.1865 -2.29,-0.62 -0.26632,-0.1847 -0.48375,-0.4315 -0.63356,-0.7189 -0.14982,-0.2874 -0.22753,-0.607 -0.22644,-0.9311 -0.02843,-0.3931 0.03646,-0.7873 0.1894,-1.1505 0.15293,-0.3632 0.38957,-0.6851 0.6906,-0.9395 0.66628,-0.4559004 1.4637,-0.6807004 2.27,-0.6400004 h 8.35003 c 0.8515,-0.0223 1.6727,-0.3206 2.34,-0.85 0.3971,-0.3622 0.7076,-0.8091 0.9084,-1.3077 0.2008,-0.49857 0.2868,-1.03596 0.2516,-1.57229 0.0113,-0.47161 -0.0887,-0.93924 -0.292,-1.36493 -0.2033,-0.4257 -0.5041,-0.79745 -0.878,-1.08507 -0.7801,-0.55815 -1.7212,-0.84609 -2.68,-0.82 H 5.95804 c -0.12537,-0.7417 -0.5248,-1.40924 -1.11913,-1.87032996 -0.59434,-0.46108 -1.3402,-0.68207 -2.08979,-0.61917 -0.74958,0.06291 -1.44818,0.40512 -1.95736,0.95881 C 0.28259,1.5230126 0,2.2477926 0,3.0000126 c 0,0.75222 0.28259,1.47699 0.79176,2.03068 0.50918,0.55369 1.20778,0.8959 1.95736,0.95881 0.74959,0.0629 1.49545,-0.15808 2.08979,-0.61917 0.59433,-0.46109 0.99376,-1.12863 1.11913,-1.87032 h 7.70003 c 0.7353,-0.03061 1.4599,0.18397 2.06,0.61 0.2548,0.19335 0.4595,0.445 0.597,0.73385 0.1375,0.28884 0.2036,0.60644 0.193,0.92615 0.0316,0.38842 -0.0247,0.77898 -0.165,1.14258 -0.1402,0.36361 -0.3607,0.69091 -0.645,0.95741 -0.5713,0.4398 -1.2799,0.663 -2,0.63 H 5.69804 c -1.03259,-0.0462 -2.05065,0.2568 -2.89,0.86 -0.43755,0.3361 -0.78838,0.7720004 -1.02322,1.2712004 -0.23484,0.4993 -0.34688,1.0474 -0.32678,1.5988 -0.00726,0.484 0.10591,0.9622 0.32934,1.3916 0.22344,0.4295 0.55012,0.7966 0.95066,1.0684 0.85039,0.5592 1.85274,0.8421 2.87,0.81 h 8.40003 c 0.0954,0.5643 0.3502,1.0896 0.7343,1.5138 0.3842,0.4242 0.8817,0.7297 1.4338,0.8803 0.5521,0.1507 1.1358,0.1403 1.6822,-0.0299 0.5464,-0.1702 1.0328,-0.4932 1.4016,-0.9308 0.3688,-0.4376 0.6048,-0.9716 0.6801,-1.5389 0.0752,-0.5673 -0.0134,-1.1444 -0.2554,-1.663 -0.242,-0.5186 -0.6273,-0.9572 -1.1104,-1.264 -0.4831,-0.3068 -1.0439,-0.469 -1.6162,-0.4675 z m 0,5 c -0.3956,0 -0.7823,-0.1173 -1.1112,-0.3371 -0.3289,-0.2197 -0.5852,-0.5321 -0.7366,-0.8975 -0.1514,-0.3655 -0.191,-0.7676 -0.1138,-1.1556 0.0772,-0.3879 0.2677,-0.7443 0.5474,-1.024 0.2797,-0.2797 0.636,-0.4702 1.024,-0.5474 0.388,-0.0771 0.7901,-0.0375 1.1555,0.1138 0.3655,0.1514 0.6778,0.4078 0.8976,0.7367 0.2198,0.3289 0.3371,0.7155 0.3371,1.1111 0,0.5304 -0.2107,1.0391 -0.5858,1.4142 -0.3751,0.3751 -0.8838,0.5858 -1.4142,0.5858 z"
id="path2"
style="fill:#777777;fill-opacity:1" />
</svg>


View File

@@ -90,5 +90,10 @@ $(document).ready(function () {
}
}
$('#diff-form').on('submit', function (e) {
if ($('select[name=from_version]').val() === $('select[name=to_version]').val()) {
e.preventDefault();
alert('Error - You are trying to compare the same version.');
}
});
});

View File

@@ -126,6 +126,8 @@ html[data-darkmode="true"] {
html[data-darkmode="true"] .watch-table .title-col a[target="_blank"]::after,
html[data-darkmode="true"] .watch-table .current-diff-url::after {
filter: invert(0.5) hue-rotate(10deg) brightness(2); }
html[data-darkmode="true"] .watch-table .status-browsersteps {
filter: invert(0.5) hue-rotate(10deg) brightness(1.5); }
html[data-darkmode="true"] .watch-table .watch-controls .state-off img {
opacity: 0.3; }
html[data-darkmode="true"] .watch-table .watch-controls .state-on img {

View File

@@ -152,6 +152,10 @@ html[data-darkmode="true"] {
filter: invert(.5) hue-rotate(10deg) brightness(2);
}
.status-browsersteps {
filter: invert(.5) hue-rotate(10deg) brightness(1.5);
}
.watch-controls {
.state-off {
img {

View File

@@ -342,6 +342,8 @@ html[data-darkmode="true"] {
html[data-darkmode="true"] .watch-table .title-col a[target="_blank"]::after,
html[data-darkmode="true"] .watch-table .current-diff-url::after {
filter: invert(0.5) hue-rotate(10deg) brightness(2); }
html[data-darkmode="true"] .watch-table .status-browsersteps {
filter: invert(0.5) hue-rotate(10deg) brightness(1.5); }
html[data-darkmode="true"] .watch-table .watch-controls .state-off img {
opacity: 0.3; }
html[data-darkmode="true"] .watch-table .watch-controls .state-on img {

View File

@@ -255,6 +255,7 @@ class ChangeDetectionStore:
'last_viewed': 0,
'previous_md5': False,
'previous_md5_before_filters': False,
'remote_server_reply': None,
'track_ldjson_price_data': None,
})

View File

@@ -115,6 +115,12 @@
Warning: Contents of <code>{{ '{{diff}}' }}</code>, <code>{{ '{{diff_removed}}' }}</code>, and <code>{{ '{{diff_added}}' }}</code> depend on how the difference algorithm perceives the change. <br>
For example, an addition or removal could be perceived as a change in some cases. <a target="_new" href="https://github.com/dgtlmoon/changedetection.io/wiki/Using-the-%7B%7Bdiff%7D%7D,-%7B%7Bdiff_added%7D%7D,-and-%7B%7Bdiff_removed%7D%7D-notification-tokens">More Here</a> <br>
</p>
<p>
For JSON payloads, use <strong>|tojson</strong> without quotes for automatic escaping, for example - <code>{ "name": {{ '{{ watch_title|tojson }}' }} }</code>
</p>
<p>
URL encoding, use <strong>|urlencode</strong>, for example - <code>gets://hook-website.com/test.php?title={{ '{{ watch_title|urlencode }}' }}</code>
</p>
</div>
</div>
<div class="pure-control-group">
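
The two new help-text examples above rely on standard Jinja2 filters; a small standalone sketch of what they render to (the sample title is invented, and the exact escaping can vary slightly between Jinja2 versions):

from jinja2 import Environment

env = Environment()
params = {'watch_title': 'My "watch" & more'}

# |tojson emits a quoted, escaped JSON string, so it must not be wrapped in extra quotes.
body = env.from_string('{ "name": {{ watch_title|tojson }} }').render(**params)
print(body)  # roughly: { "name": "My \"watch\" \u0026 more" }

# |urlencode percent-encodes the value for safe use inside a URL query string.
url = env.from_string('gets://hook-website.com/test.php?title={{ watch_title|urlencode }}').render(**params)
print(url)   # roughly: gets://hook-website.com/test.php?title=My%20%22watch%22%20%26%20more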

View File

@@ -13,7 +13,7 @@
<script src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script>
<div id="settings">
<form class="pure-form " action="" method="GET">
<form class="pure-form " action="" method="GET" id="diff-form">
<fieldset>
{% if versions|length >= 1 %}
<strong>Compare</strong>

View File

@@ -339,6 +339,10 @@ nav
<span class="pure-form-message-inline">When content is merely moved in a list, it will also trigger an <strong>addition</strong>, consider enabling <code><strong>Only trigger when unique lines appear</strong></code></span>
</fieldset>
<fieldset class="pure-control-group">
{{ render_checkbox_field(form.sort_text_alphabetically) }}
<span class="pure-form-message-inline">Helps reduce detected changes caused by sites shuffling lines around; combine with <i>check unique lines</i> below.</span>
</fieldset>
<fieldset class="pure-control-group">
{{ render_checkbox_field(form.check_unique_lines) }}
<span class="pure-form-message-inline">Good for websites that just move the content around, and you want to know when NEW content is added, compares new lines against all history for this watch.</span>
@@ -401,6 +405,7 @@ Unavailable") }}
<li>Use <code>//(?aiLmsux))</code> type flags (more <a href="https://docs.python.org/3/library/re.html#index-15">information here</a>)<br></li>
<li>Keyword example &dash; example <code>Out of stock</code></li>
<li>Use groups to extract just that text &dash; example <code>/reports.+?(\d+)/i</code> returns a list of years only</li>
<li>Example - match lines containing a keyword <code>/.*icecream.*/</code></li>
</ul>
</li>
<li>One line per regular-expression/string match</li>
@@ -482,6 +487,10 @@ Unavailable") }}
<td>Last fetch time</td>
<td>{{ watch.fetch_time }}s</td>
</tr>
<tr>
<td>Notification alert count</td>
<td>{{ watch.notification_alert_count }}</td>
</tr>
</tbody>
</table>
</div>

View File

@@ -110,6 +110,7 @@
{% endif %}
{%if watch.is_pdf %}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" title="Converting PDF to text" >{% endif %}
{% if watch.has_browser_steps %}<img class="status-icon status-browsersteps" src="{{url_for('static_content', group='images', filename='steps.svg')}}" title="Browser Steps is enabled" >{% endif %}
{% if watch.last_error is defined and watch.last_error != False %}
<div class="fetch-error">{{ watch.last_error }}

View File

@@ -163,6 +163,7 @@ def test_api_simple(client, live_server):
# Loading the most recent snapshot should force viewed to become true
client.get(url_for("diff_history_page", uuid="first"), follow_redirects=True)
time.sleep(3)
# Fetch the whole watch again, viewed should be true
res = client.get(
url_for("watch", uuid=watch_uuid),

View File

@@ -10,7 +10,7 @@ def test_setup(live_server):
# Hard to just add more live server URLs when one test is already running (I think)
# So we add our test here (was in a different file)
def test_headers_in_request(client, live_server):
#live_server_setup(live_server)
#ve_server_setup(live_server)
# Add our URL to the import page
test_url = url_for('test_headers', _external=True)
if os.getenv('PLAYWRIGHT_DRIVER_URL'):
@@ -70,16 +70,17 @@ def test_headers_in_request(client, live_server):
wait_for_all_checks(client)
# Re #137 - Examine the JSON index file, it should have only one set of headers entered
# Re #137 - It should have only one set of headers entered
watches_with_headers = 0
with open('test-datastore/url-watches.json') as f:
app_struct = json.load(f)
for uuid in app_struct['watching']:
if (len(app_struct['watching'][uuid]['headers'])):
for k, watch in client.application.config.get('DATASTORE').data.get('watching').items():
if (len(watch['headers'])):
watches_with_headers += 1
assert watches_with_headers == 1
# 'server' http header was automatically recorded
for k, watch in client.application.config.get('DATASTORE').data.get('watching').items():
assert 'custom' in watch.get('remote_server_reply') # added in util.py
# Should be only one with headers set
assert watches_with_headers==1
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data

View File

@@ -2,7 +2,7 @@
import time
from flask import url_for
from .util import live_server_setup
from .util import live_server_setup, wait_for_all_checks
def set_original_ignore_response():
@@ -34,6 +34,23 @@ def set_modified_swapped_lines():
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
def set_modified_swapped_lines_with_extra_text_for_sorting():
test_return_data = """<html>
<body>
<p>&nbsp;Which is across multiple lines</p>
<p>Some initial text</p>
<p> So let's see what happens.</p>
<p>Z last</p>
<p>0 numerical</p>
<p>A uppercase</p>
<p>a lowercase</p>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
def set_modified_with_trigger_text_response():
test_return_data = """<html>
@@ -49,15 +66,14 @@ def set_modified_with_trigger_text_response():
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
def test_unique_lines_functionality(client, live_server):
def test_setup(client, live_server):
live_server_setup(live_server)
sleep_time_for_fetch_thread = 3
def test_unique_lines_functionality(client, live_server):
#live_server_setup(live_server)
set_original_ignore_response()
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
@@ -67,7 +83,7 @@ def test_unique_lines_functionality(client, live_server):
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(sleep_time_for_fetch_thread)
wait_for_all_checks(client)
# Add our URL to the import page
res = client.post(
@@ -83,12 +99,11 @@ def test_unique_lines_functionality(client, live_server):
# Make a change
set_modified_swapped_lines()
time.sleep(sleep_time_for_fetch_thread)
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
wait_for_all_checks(client)
# It should report nothing found (no new 'unviewed' class)
res = client.get(url_for("index"))
@@ -97,7 +112,57 @@ def test_unique_lines_functionality(client, live_server):
# Now set the content which contains the new text and re-ordered existing text
set_modified_with_trigger_text_response()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(sleep_time_for_fetch_thread)
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'unviewed' in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_sort_lines_functionality(client, live_server):
#live_server_setup(live_server)
set_modified_swapped_lines_with_extra_text_for_sorting()
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
wait_for_all_checks(client)
# Add our URL to the import page
res = client.post(
url_for("edit_page", uuid="first"),
data={"sort_text_alphabetically": "n",
"url": test_url,
"fetch_backend": "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
wait_for_all_checks(client)
res = client.get(url_for("index"))
# Should be a change registered
assert b'unviewed' in res.data
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True
)
assert res.data.find(b'0 numerical') < res.data.find(b'Z last')
assert res.data.find(b'A uppercase') < res.data.find(b'Z last')
assert res.data.find(b'Some initial text') < res.data.find(b'Which is across multiple lines')
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data

View File

@@ -175,12 +175,16 @@ def live_server_setup(live_server):
@live_server.app.route('/test-headers')
def test_headers():
output= []
output = []
for header in request.headers:
output.append("{}:{}".format(str(header[0]),str(header[1]) ))
output.append("{}:{}".format(str(header[0]), str(header[1])))
return "\n".join(output)
content = "\n".join(output)
resp = make_response(content, 200)
resp.headers['server'] = 'custom'
return resp
# Just return the body in the request
@live_server.app.route('/test-body', methods=['POST', 'GET'])

View File

@@ -31,6 +31,8 @@ class update_worker(threading.Thread):
dates = []
trigger_text = ''
now = time.time()
if watch:
watch_history = watch.history
dates = list(watch_history.keys())
@@ -72,13 +74,14 @@ class update_worker(threading.Thread):
'diff_full': diff.render_diff(prev_snapshot, current_snapshot, include_equal=True, line_feed_sep=line_feed_sep),
'diff_patch': diff.render_diff(prev_snapshot, current_snapshot, line_feed_sep=line_feed_sep, patch_format=True),
'diff_removed': diff.render_diff(prev_snapshot, current_snapshot, include_added=False, line_feed_sep=line_feed_sep),
'notification_timestamp': now,
'screenshot': watch.get_screenshot() if watch and watch.get('notification_screenshot') else None,
'triggered_text': triggered_text,
'uuid': watch.get('uuid') if watch else None,
'watch_url': watch.get('url') if watch else None,
})
logger.debug(">> SENDING NOTIFICATION")
logger.trace(f"Main rendered notification placeholders (diff_added etc) calculated in {time.time()-now:.3f}s")
logger.debug("Queued notification for sending")
notification_q.put(n_object)
# Prefer - Individual watch settings > Tag settings > Global settings (in that order)
@@ -147,6 +150,10 @@ class update_worker(threading.Thread):
queued = False
if n_object and n_object.get('notification_urls'):
queued = True
count = watch.get('notification_alert_count', 0) + 1
self.datastore.update_watch(uuid=watch_uuid, update_obj={'notification_alert_count': count})
self.queue_notification_for_watch(notification_q=self.notification_q, n_object=n_object, watch=watch)
return queued
@@ -427,6 +434,12 @@ class update_worker(threading.Thread):
'last_check_status': e.status_code,
'has_ldjson_price_data': None})
process_changedetection_results = False
except content_fetcher.BrowserStepsInUnsupportedFetcher as e:
err_text = "This watch has Browser Steps configured and so it cannot run with the 'Basic fast Plaintext/HTTP Client', either remove the Browser Steps or select a Chrome fetcher."
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text})
process_changedetection_results = False
logger.error(f"Exception (BrowserStepsInUnsupportedFetcher) reached processing watch UUID: {uuid}")
except UnableToExtractRestockData as e:
# Usually when fetcher.instock_data returns empty
logger.error(f"Exception (UnableToExtractRestockData) reached processing watch UUID: {uuid}")
@@ -471,13 +484,13 @@ class update_worker(threading.Thread):
# A change was detected
if changed_detected:
logger.debug(f">> Change detected in UUID {uuid} - {watch['url']}")
# Notifications should only trigger on the second time (first time, we gather the initial snapshot)
if watch.history_n >= 2:
logger.info(f"Change detected in UUID {uuid} - {watch['url']}")
if not self.datastore.data['watching'][uuid].get('notification_muted'):
self.send_content_changed_notification(watch_uuid=uuid)
else:
logger.info(f"Change triggered in UUID {uuid} due to first history saving (no notifications sent) - {watch['url']}")
except Exception as e:
# Catch everything possible here, so that if a worker crashes, we don't lose it until restart!
@@ -488,6 +501,16 @@ class update_worker(threading.Thread):
if self.datastore.data['watching'].get(uuid):
# Always record that we at least tried
count = self.datastore.data['watching'][uuid].get('check_count', 0) + 1
# Record the 'server' header reply, can be used for actions in the future like cloudflare/akamai workarounds
try:
server_header = update_handler.fetcher.headers.get('server', '').strip().lower()[:255]
self.datastore.update_watch(uuid=uuid,
update_obj={'remote_server_reply': server_header}
)
except Exception as e:
pass
self.datastore.update_watch(uuid=uuid, update_obj={'fetch_time': round(time.time() - now, 3),
'last_checked': round(time.time()),
'check_count': count
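
The post-check bookkeeping added above boils down to two small per-watch updates; a standalone sketch with invented stand-ins for the fetcher headers and the stored watch record:

watch = {'notification_alert_count': 2}           # stand-in for the stored watch dict
fetcher_headers = {'server': '  Cloudflare  '}    # stand-in for update_handler.fetcher.headers

# Count every notification actually queued for this watch (surfaced in the [stats] tab).
update_obj = {'notification_alert_count': watch.get('notification_alert_count', 0) + 1}

# Record the normalised 'server' reply header for possible future per-CDN workarounds;
# a missing or malformed header must never break the bookkeeping itself.
try:
    update_obj['remote_server_reply'] = fetcher_headers.get('server', '').strip().lower()[:255]
except Exception:
    pass

print(update_obj)  # {'notification_alert_count': 3, 'remote_server_reply': 'cloudflare'}
# in the real code: datastore.update_watch(uuid=uuid, update_obj=update_obj)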

View File

@@ -94,7 +94,9 @@ services:
#
# Used for fetching pages via Playwright+Chrome where you need Javascript support.
# Note: works well but is deprecated, does not fetch full page screenshots (doesnt work with Visual Selector) and other issues
# Note: Works well but is deprecated, does not fetch full page screenshots (doesnt work with Visual Selector)
# Does not report status codes (200, 404, 403) and other issues
# More information about the advantages of playwright/browserless https://www.browserless.io/blog/2023/12/13/migrating-selenium-to-playwright/
# browser-chrome:
# hostname: browser-chrome
# image: selenium/standalone-chrome:4

View File

@@ -46,8 +46,8 @@ beautifulsoup4
# XPath filtering, lxml is required by bs4 anyway, but put it here to be safe.
lxml
# XPath 2.0-3.1 support
elementpath
# XPath 2.0-3.1 support - 4.2.0 broke something?
elementpath==4.1.5
selenium~=4.14.0