Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-03 08:07:23 +00:00)

Compare commits: 59 commits, notificati ... janus-queu
Commits in this compare (SHA1 only):
213a18c061, 1633b94511, 928b97e6e5, ea09168650, 4f6e9dcc56, f0588a9dd1, aa4e182549, fe1f7c30e1, e5ed1ae349, d1b1dd70f4, 93b14c9fc8, c9c5de20d8, 011fa3540e, c3c3671f8b, 5980bd9bcd, 438871429c, 173ce5bfa2, 106b1f85fa, a5c7f343d0, 401886bcda, c66fca9de9, daee4c5c17, af5d0b6963, f92dd81c8f, 55cdcfe3ea, 2f7520a6c5, 4fdc5d7da2, 308f30b2e8, 4fa2042d12, 2a4e1bad4e, 8a317eead5, b58094877f, afe252126c, 342e6119f1, e4ff87e970, e45a544f15, 9a5abaa17a, b8ecfff861, 58e2a41c95, a7214db9c3, b9da4af64f, b77105be7b, 3d5a544ea6, 4f362385e1, a01d6169d2, 9beda3911d, 5ed596bfa9, 99ca8787ab, 8f1a6feb90, c0e229201b, 66bc7fbc04, 530bd40ca5, 36004cf74b, c7374245e1, 59df59e9cd, c0c2898b91, abac660bac, 26de64d873, 79d9a8ca28
.github/test/Dockerfile-alpine (vendored, 6 lines changed)

@@ -2,7 +2,7 @@
 # Test that we can still build on Alpine (musl modified libc https://musl.libc.org/)
 # Some packages wont install via pypi because they dont have a wheel available under this architecture.

-FROM ghcr.io/linuxserver/baseimage-alpine:3.21
+FROM ghcr.io/linuxserver/baseimage-alpine:3.22
 ENV PYTHONUNBUFFERED=1

 COPY requirements.txt /requirements.txt

@@ -18,17 +18,19 @@ RUN \
     libxslt-dev \
     openssl-dev \
     python3-dev \
     file \
     zip \
     zlib-dev && \
   apk add --update --no-cache \
     libjpeg \
     libxslt \
     file \
     nodejs \
     poppler-utils \
     python3 && \
   echo "**** pip3 install test of changedetection.io ****" && \
   python3 -m venv /lsiopy && \
   pip install -U pip wheel setuptools && \
-  pip install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.21/ -r /requirements.txt && \
+  pip install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.22/ -r /requirements.txt && \
   apk del --purge \
     build-dependencies
.github/workflows/pypi-release.yml (vendored, 4 lines changed)

@@ -34,7 +34,7 @@ jobs:
     - build
     steps:
     - name: Download all the dists
-      uses: actions/download-artifact@v4
+      uses: actions/download-artifact@v5
      with:
        name: python-package-distributions
        path: dist/

@@ -72,7 +72,7 @@ jobs:
     steps:
     - name: Download all the dists
-      uses: actions/download-artifact@v4
+      uses: actions/download-artifact@v5
      with:
        name: python-package-distributions
        path: dist/

@@ -179,6 +179,26 @@ jobs:
       docker kill test-changedetectionio

+    - name: Test HTTPS SSL mode
+      run: |
+        openssl req -x509 -newkey rsa:4096 -keyout privkey.pem -out cert.pem -days 365 -nodes -subj "/CN=localhost"
+        docker run --name test-changedetectionio-ssl --rm -e SSL_CERT_FILE=cert.pem -e SSL_PRIVKEY_FILE=privkey.pem -p 5000:5000 -v ./cert.pem:/app/cert.pem -v ./privkey.pem:/app/privkey.pem -d test-changedetectionio
+        sleep 3
+        # Should return 0 (no error) when grep finds it
+        # -k because its self-signed
+        curl --retry-connrefused --retry 6 -k https://localhost:5000 -v|grep -q checkbox-uuid
+
+        docker kill test-changedetectionio-ssl
+
+    - name: Test IPv6 Mode
+      run: |
+        # IPv6 - :: bind to all interfaces inside container (like 0.0.0.0), ::1 would be localhost only
+        docker run --name test-changedetectionio-ipv6 --rm -p 5000:5000 -e LISTEN_HOST=:: -d test-changedetectionio
+        sleep 3
+        # Should return 0 (no error) when grep finds it on localhost
+        curl --retry-connrefused --retry 6 http://[::1]:5000 -v|grep -q checkbox-uuid
+        docker kill test-changedetectionio-ipv6

     - name: Test changedetection.io SIGTERM and SIGINT signal shutdown
       run: |
@@ -16,6 +16,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     libssl-dev \
     libxslt-dev \
     make \
     patch \
     zlib1g-dev

 RUN mkdir /install

@@ -53,6 +54,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     locales \
     # For pdftohtml
     poppler-utils \
+    # favicon type detection and other uses
+    file \
     zlib1g \
     && apt-get clean && rm -rf /var/lib/apt/lists/*

@@ -81,6 +84,9 @@ COPY changedetection.py /app/changedetection.py
 ARG LOGGER_LEVEL=''
 ENV LOGGER_LEVEL="$LOGGER_LEVEL"

+# Default
+ENV LC_ALL=en_US.UTF-8

 WORKDIR /app
 CMD ["python", "./changedetection.py", "-d", "/datastore"]
LICENSE (2 lines changed)

@@ -186,7 +186,7 @@
       same "printed page" as the copyright notice for easier
       identification within third-party archives.

-   Copyright [yyyy] [name of copyright owner]
+   Copyright 2025 Web Technologies s.r.o.

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
@@ -1,11 +1,21 @@
-## Web Site Change Detection, Monitoring and Notification.
+# Monitor website changes

-Live your data-life pro-actively, track website content changes and receive notifications via Discord, Email, Slack, Telegram and 70+ more
+Detect WebPage Changes Automatically — Monitor Web Page Changes in Real Time
+
+Monitor websites for updates — get notified via Discord, Email, Slack, Telegram, Webhook and many more.
+
+Detect web page content changes and get instant alerts.

 [Changedetection.io is the best tool to monitor web-pages for changes](https://changedetection.io) Track website content changes and receive notifications via Discord, Email, Slack, Telegram and 90+ more

+Ideal for monitoring price changes, content edits, conditional changes and more.

 [<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring, list of websites with changes" title="Self-hosted web page change monitoring, list of websites with changes" />](https://changedetection.io)

-[**Don't have time? Let us host it for you! try our extremely affordable subscription use our proxies and support!**](https://changedetection.io)
+[**Don't have time? Try our extremely affordable subscription use our proxies and support!**](https://changedetection.io)

 ### Target specific parts of the webpage using the Visual Selector tool.
README.md (15 lines changed)

@@ -1,11 +1,13 @@
-## Web Site Change Detection, Restock monitoring and notifications.
+# Detect Website Changes Automatically — Monitor Web Page Changes in Real Time

-**_Detect website content changes and perform meaningful actions - trigger notifications via Discord, Email, Slack, Telegram, API calls and many more._**
+Monitor websites for updates — get notified via Discord, Email, Slack, Telegram, Webhook and many more.

-_Live your data-life pro-actively._
+**Detect web page content changes and get instant alerts.**
+
+Ideal for monitoring price changes, content edits, conditional changes and more.

-[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web site page change monitoring" title="Self-hosted web site page change monitoring" />](https://changedetection.io?src=github)
+[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Web site page change monitoring" title="Web site page change monitoring" />](https://changedetection.io?src=github)

 [![Release Version][release-shield]][release-link] [![Docker Pulls][docker-pulls]][docker-link] [![License][license-shield]](LICENSE.md)

@@ -13,6 +15,7 @@ _Live your data-life pro-actively._
 [**Get started with website page change monitoring straight away. Don't have time? Try our $8.99/month subscription, use our proxies and support!**](https://changedetection.io) , _half the price of other website change monitoring services!_

+- Chrome browser included.
 - Nothing to install, access via browser login after signup.
 - Super fast, no registration needed setup.

@@ -99,9 +102,7 @@ _Need an actual Chrome runner with Javascript support? We support fetching via W
 - Configurable [proxy per watch](https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration)
 - Send a screenshot with the notification when a change is detected in the web page

-We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $100 using our signup link.
-
-[Oxylabs](https://oxylabs.go2cloud.org/SH2d) is also an excellent proxy provider and well worth using, they offer Residental, ISP, Rotating and many other proxy types to suit your project.
+We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $150 using our signup link.

 Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/
@@ -2,7 +2,7 @@

 # Read more https://github.com/dgtlmoon/changedetection.io/wiki

-__version__ = '0.50.2'
+__version__ = '0.50.8'

 from changedetectionio.strtobool import strtobool
 from json.decoder import JSONDecodeError
@@ -35,13 +35,22 @@ def sigshutdown_handler(_signo, _stack_frame):
     app.config.exit.set()
     datastore.stop_thread = True

-    # Shutdown workers immediately
+    # Shutdown workers and queues immediately
     try:
         from changedetectionio import worker_handler
         worker_handler.shutdown_workers()
     except Exception as e:
         logger.error(f"Error shutting down workers: {str(e)}")

+    # Close janus queues properly
+    try:
+        from changedetectionio.flask_app import update_q, notification_q
+        update_q.close()
+        notification_q.close()
+        logger.debug("Janus queues closed successfully")
+    except Exception as e:
+        logger.critical(f"CRITICAL: Failed to close janus queues: {e}")
+
     # Shutdown socketio server fast
     from changedetectionio.flask_app import socketio_server
     if socketio_server and hasattr(socketio_server, 'shutdown'):
@@ -65,8 +74,7 @@ def main():

     datastore_path = None
     do_cleanup = False
-    host = "0.0.0.0"
-    ipv6_enabled = False
+    host = os.environ.get("LISTEN_HOST", "0.0.0.0").strip()
     port = int(os.environ.get('PORT', 5000))
     ssl_mode = False
@@ -108,10 +116,6 @@ def main():
         if opt == '-d':
             datastore_path = arg

-        if opt == '-6':
-            logger.success("Enabling IPv6 listen support")
-            ipv6_enabled = True
-
         # Cleanup (remove text files that arent in the index)
         if opt == '-c':
             do_cleanup = True
@@ -123,6 +127,20 @@ def main():
         if opt == '-l':
             logger_level = int(arg) if arg.isdigit() else arg.upper()

+    logger.success(f"changedetection.io version {get_version()} starting.")
+    # Launch using SocketIO run method for proper integration (if enabled)
+    ssl_cert_file = os.getenv("SSL_CERT_FILE", 'cert.pem')
+    ssl_privkey_file = os.getenv("SSL_PRIVKEY_FILE", 'privkey.pem')
+    if os.getenv("SSL_CERT_FILE") and os.getenv("SSL_PRIVKEY_FILE"):
+        ssl_mode = True
+
+    # SSL mode could have been set by -s too, therefor fallback to default values
+    if ssl_mode:
+        if not os.path.isfile(ssl_cert_file) or not os.path.isfile(ssl_privkey_file):
+            logger.critical(f"Cannot start SSL/HTTPS mode, Please be sure that {ssl_cert_file}' and '{ssl_privkey_file}' exist in in {os.getcwd()}")
+            os._exit(2)
+
     # Without this, a logger will be duplicated
     logger.remove()
     try:
@@ -222,19 +240,19 @@ def main():

     # SocketIO instance is already initialized in flask_app.py

     # Launch using SocketIO run method for proper integration (if enabled)
     if socketio_server:
         if ssl_mode:
-            socketio.run(app, host=host, port=int(port), debug=False,
-                         certfile='cert.pem', keyfile='privkey.pem', allow_unsafe_werkzeug=True)
+            logger.success(f"SSL mode enabled, attempting to start with '{ssl_cert_file}' and '{ssl_privkey_file}' in {os.getcwd()}")
+            socketio.run(app, host=host, port=int(port), debug=False,
+                         ssl_context=(ssl_cert_file, ssl_privkey_file), allow_unsafe_werkzeug=True)
         else:
             socketio.run(app, host=host, port=int(port), debug=False, allow_unsafe_werkzeug=True)
     else:
         # Run Flask app without Socket.IO if disabled
         logger.info("Starting Flask app without Socket.IO server")
         if ssl_mode:
-            app.run(host=host, port=int(port), debug=False,
-                    ssl_context=('cert.pem', 'privkey.pem'))
+            logger.success(f"SSL mode enabled, attempting to start with '{ssl_cert_file}' and '{ssl_privkey_file}' in {os.getcwd()}")
+            app.run(host=host, port=int(port), debug=False,
+                    ssl_context=(ssl_cert_file, ssl_privkey_file))
         else:
             app.run(host=host, port=int(port), debug=False)
@@ -5,7 +5,7 @@ from flask_expects_json import expects_json
 from changedetectionio import queuedWatchMetaData
 from changedetectionio import worker_handler
 from flask_restful import abort, Resource
-from flask import request, make_response
+from flask import request, make_response, send_from_directory
 import validators
 from . import auth
 import copy

@@ -191,6 +191,47 @@ class WatchSingleHistory(Resource):

         return response

+class WatchFavicon(Resource):
+    def __init__(self, **kwargs):
+        # datastore is a black box dependency
+        self.datastore = kwargs['datastore']
+
+    @auth.check_token
+    def get(self, uuid):
+        """
+        @api {get} /api/v1/watch/<string:uuid>/favicon Get Favicon for a watch
+        @apiDescription Requires watch `uuid`
+        @apiExample {curl} Example usage:
+            curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/favicon -H"x-api-key:813031b16330fe25e3780cf0325daa45"
+        @apiName Get latest Favicon
+        @apiGroup Watch History
+        @apiSuccess (200) {String} OK
+        @apiSuccess (404) {String} ERR Not found
+        """
+        watch = self.datastore.data['watching'].get(uuid)
+        if not watch:
+            abort(404, message=f"No watch exists with the UUID of {uuid}")
+
+        favicon_filename = watch.get_favicon_filename()
+        if favicon_filename:
+            try:
+                import magic
+                mime = magic.from_file(
+                    os.path.join(watch.watch_data_dir, favicon_filename),
+                    mime=True
+                )
+            except ImportError:
+                # Fallback, no python-magic
+                import mimetypes
+                mime, encoding = mimetypes.guess_type(favicon_filename)
+
+            response = make_response(send_from_directory(watch.watch_data_dir, favicon_filename))
+            response.headers['Content-type'] = mime
+            response.headers['Cache-Control'] = 'max-age=300, must-revalidate'  # Cache for 5 minutes, then revalidate
+            return response
+
+        abort(404, message=f'No Favicon available for {uuid}')
+
+
 class CreateWatch(Resource):
     def __init__(self, **kwargs):
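For reference, a minimal client-side sketch of calling the new favicon endpoint and saving the result. The base URL, API key and watch UUID below are placeholders, not values from this changeset.

```python
# Minimal sketch: download the favicon served by the new /favicon endpoint.
# BASE_URL, API_KEY and WATCH_UUID are placeholders.
import mimetypes
import requests

BASE_URL = "http://localhost:5000"
API_KEY = "your-api-key"          # Settings > API
WATCH_UUID = "your-watch-uuid"

resp = requests.get(
    f"{BASE_URL}/api/v1/watch/{WATCH_UUID}/favicon",
    headers={"x-api-key": API_KEY},
    timeout=10,
)
resp.raise_for_status()

# Pick a file extension from the Content-Type header the endpoint sets.
ext = mimetypes.guess_extension(resp.headers.get("Content-Type", "")) or ".ico"
with open(f"favicon{ext}", "wb") as f:
    f.write(resp.content)
```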
@@ -26,7 +26,7 @@ schema_delete_notification_urls = copy.deepcopy(schema_notification_urls)
 schema_delete_notification_urls['required'] = ['notification_urls']

 # Import all API resources
-from .Watch import Watch, WatchHistory, WatchSingleHistory, CreateWatch
+from .Watch import Watch, WatchHistory, WatchSingleHistory, CreateWatch, WatchFavicon
 from .Tags import Tags, Tag
 from .Import import Import
 from .SystemInfo import SystemInfo
@@ -7,6 +7,7 @@ from changedetectionio.flask_app import watch_check_update

 import asyncio
 import importlib
 import os
 import queue
 import time

 from loguru import logger

@@ -37,13 +38,23 @@ async def async_update_worker(worker_id, q, notification_q, app, datastore):
         watch = None

         try:
-            # Use asyncio wait_for to make queue.get() cancellable
-            queued_item_data = await asyncio.wait_for(q.get(), timeout=1.0)
+            # Use native janus async interface - no threads needed!
+            queued_item_data = await asyncio.wait_for(q.async_get(), timeout=1.0)

         except asyncio.TimeoutError:
             # No jobs available, continue loop
             continue
         except Exception as e:
-            logger.error(f"Worker {worker_id} error getting queue item: {e}")
+            logger.critical(f"CRITICAL: Worker {worker_id} failed to get queue item: {type(e).__name__}: {e}")
+
+            # Log queue health for debugging
+            try:
+                queue_size = q.qsize()
+                is_empty = q.empty()
+                logger.critical(f"CRITICAL: Worker {worker_id} queue health - size: {queue_size}, empty: {is_empty}")
+            except Exception as health_e:
+                logger.critical(f"CRITICAL: Worker {worker_id} queue health check failed: {health_e}")
+
             await asyncio.sleep(0.1)
             continue
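The `async_get()` helper above belongs to changedetection.io's own queue wrapper; underneath, janus exposes one queue with both a thread-side and an asyncio-side interface. A minimal sketch of that general pattern, illustrative only and not the project's RecheckPriorityQueue:

```python
# Minimal janus sketch: a thread-side producer and an asyncio consumer sharing one queue.
# Illustrates the pattern only; the real queue classes add priority handling on top.
import asyncio
import threading
import janus

def producer(sync_q):
    # Called from a normal thread, e.g. a Flask request handler.
    for i in range(3):
        sync_q.put(f"job-{i}")

async def consumer(async_q):
    while True:
        try:
            # Same idea as the worker loop above: keep the await cancellable with a timeout.
            item = await asyncio.wait_for(async_q.get(), timeout=1.0)
        except asyncio.TimeoutError:
            break  # no more jobs in this demo
        print("processing", item)
        async_q.task_done()

async def main():
    q = janus.Queue()  # create inside a running event loop
    threading.Thread(target=producer, args=(q.sync_q,)).start()
    await consumer(q.async_q)
    q.close()
    await q.wait_closed()

asyncio.run(main())
```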
@@ -353,6 +364,12 @@ async def async_update_worker(worker_id, q, notification_q, app, datastore):
                     except Exception as e:
                         pass

+                    # Store favicon if necessary
+                    if update_handler.fetcher.favicon_blob and update_handler.fetcher.favicon_blob.get('base64'):
+                        watch.bump_favicon(url=update_handler.fetcher.favicon_blob.get('url'),
+                                           favicon_base_64=update_handler.fetcher.favicon_blob.get('base64')
+                                           )
+
                     datastore.update_watch(uuid=uuid, update_obj={'fetch_time': round(time.time() - fetch_start_time, 3),
                                                                   'check_count': count})
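`bump_favicon()` itself is not shown in this compare; conceptually it only needs to decode the base64 payload the fetcher returned and persist it alongside the watch data. A rough sketch under that assumption, with the helper name and file naming being illustrative:

```python
# Illustrative sketch only: decode the {'url': ..., 'base64': ...} blob from the fetcher
# and write it into a watch's data directory. Not the project's real method.
import base64
import os
from urllib.parse import urlparse

def save_favicon_blob(watch_data_dir: str, favicon_blob: dict) -> str:
    url = favicon_blob.get("url", "")
    raw = base64.b64decode(favicon_blob["base64"])

    # Keep the original extension if the URL has one, otherwise fall back to .ico
    ext = os.path.splitext(urlparse(url).path)[1] or ".ico"
    out_path = os.path.join(watch_data_dir, f"favicon{ext}")
    with open(out_path, "wb") as f:
        f.write(raw)
    return out_path
```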
@@ -132,7 +132,7 @@ class steppable_browser_interface():

     # Incase they request to go back to the start
     async def action_goto_site(self, selector=None, value=None):
-        return await self.action_goto_url(value=self.start_url)
+        return await self.action_goto_url(value=re.sub(r'^source:', '', self.start_url, flags=re.IGNORECASE))

     async def action_click_element_containing_text(self, selector=None, value=''):
         logger.debug("Clicking element containing text")
@@ -256,6 +256,11 @@ nav
             {{ render_checkbox_field(form.application.form.ui.form.socket_io_enabled, class="socket_io_enabled") }}
             <span class="pure-form-message-inline">Realtime UI Updates Enabled - (Restart required if this is changed)</span>
         </div>
+        <div class="pure-control-group">
+            {{ render_checkbox_field(form.application.form.ui.form.favicons_enabled, class="") }}
+            <span class="pure-form-message-inline">Enable or Disable Favicons next to the watch list</span>
+        </div>

     </div>
     <div class="tab-pane-inner" id="proxies">
         <div id="recommended-proxy">
@@ -10,7 +10,7 @@
         <legend>Add a new organisational tag</legend>
         <div id="watch-add-wrapper-zone">
             <div>
-                {{ render_simple_field(form.name, placeholder="watch label / tag") }}
+                {{ render_simple_field(form.name, placeholder="Watch group / tag") }}
             </div>
             <div>
                 {{ render_simple_field(form.save_button, title="Save" ) }}
@@ -159,12 +159,20 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, worker_handle
     def mark_all_viewed():
         # Save the current newest history as the most recently viewed
         with_errors = request.args.get('with_errors') == "1"
+        tag_limit = request.args.get('tag')
+        logger.debug(f"Limiting to tag {tag_limit}")
+        now = int(time.time())
         for watch_uuid, watch in datastore.data['watching'].items():
             if with_errors and not watch.get('last_error'):
                 continue
-            datastore.set_last_viewed(watch_uuid, int(time.time()))
-
-        return redirect(url_for('watchlist.index'))
+            if tag_limit and ( not watch.get('tags') or tag_limit not in watch['tags'] ):
+                logger.debug(f"Skipping watch {watch_uuid}")
+                continue
+
+            datastore.set_last_viewed(watch_uuid, now)
+
+        return redirect(url_for('watchlist.index', tag=tag_limit))

     @ui_blueprint.route("/delete", methods=['GET'])
     @login_optionally_required
@@ -1,8 +1,7 @@
 from flask import Blueprint, request, redirect, url_for, flash, render_template, make_response, send_from_directory, abort
 from flask_login import current_user
 import os
 import time
 from copy import deepcopy
 from loguru import logger

 from changedetectionio.store import ChangeDetectionStore
 from changedetectionio.auth_decorator import login_optionally_required
@@ -78,9 +77,46 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe

         return output

-    @views_blueprint.route("/diff/<string:uuid>", methods=['GET', 'POST'])
+    @views_blueprint.route("/diff/<string:uuid>", methods=['POST'])
     @login_optionally_required
-    def diff_history_page(uuid):
+    def diff_history_page_build_report(uuid):
         from changedetectionio import forms

         # More for testing, possible to return the first/only
         if uuid == 'first':
             uuid = list(datastore.data['watching'].keys()).pop()

         try:
             watch = datastore.data['watching'][uuid]
         except KeyError:
             flash("No history found for the specified link, bad link?", "error")
             return redirect(url_for('watchlist.index'))

         # For submission of requesting an extract
         extract_form = forms.extractDataForm(formdata=request.form,
                                              data={'extract_regex': request.form.get('extract_regex', '')}
                                              )
         if not extract_form.validate():
             flash("An error occurred, please see below.", "error")
             return _render_diff_template(uuid, extract_form)

         else:
             extract_regex = request.form.get('extract_regex', '').strip()
             output = watch.extract_regex_from_all_history(extract_regex)
             if output:
                 watch_dir = os.path.join(datastore.datastore_path, uuid)
                 response = make_response(send_from_directory(directory=watch_dir, path=output, as_attachment=True))
                 response.headers['Content-type'] = 'text/csv'
                 response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
                 response.headers['Pragma'] = 'no-cache'
                 response.headers['Expires'] = "0"
                 return response

             flash('No matches found while scanning all of the watch history for that RegEx.', 'error')
         return redirect(url_for('ui.ui_views.diff_history_page', uuid=uuid) + '#extract')

     def _render_diff_template(uuid, extract_form=None):
         """Helper function to render the diff template with all required data"""
         from changedetectionio import forms

         # More for testing, possible to return the first/only
@@ -94,62 +130,36 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
             flash("No history found for the specified link, bad link?", "error")
             return redirect(url_for('watchlist.index'))

         # For submission of requesting an extract
         extract_form = forms.extractDataForm(request.form)
         if request.method == 'POST':
             if not extract_form.validate():
                 flash("An error occurred, please see below.", "error")

             else:
                 extract_regex = request.form.get('extract_regex').strip()
                 output = watch.extract_regex_from_all_history(extract_regex)
                 if output:
                     watch_dir = os.path.join(datastore.datastore_path, uuid)
                     response = make_response(send_from_directory(directory=watch_dir, path=output, as_attachment=True))
                     response.headers['Content-type'] = 'text/csv'
                     response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
                     response.headers['Pragma'] = 'no-cache'
                     response.headers['Expires'] = 0
                     return response

                 flash('Nothing matches that RegEx', 'error')
                 redirect(url_for('ui_views.diff_history_page', uuid=uuid)+'#extract')
         # Use provided form or create a new one
         if extract_form is None:
             extract_form = forms.extractDataForm(formdata=request.form,
                                                  data={'extract_regex': request.form.get('extract_regex', '')}
                                                  )

         history = watch.history
         dates = list(history.keys())

         if len(dates) < 2:
             flash("Not enough saved change detection snapshots to produce a report.", "error")
             return redirect(url_for('watchlist.index'))
         # If a "from_version" was requested, then find it (or the closest one)
         # Also set "from version" to be the closest version to the one that was last viewed.

         # Save the current newest history as the most recently viewed
         datastore.set_last_viewed(uuid, time.time())
         best_last_viewed_timestamp = watch.get_from_version_based_on_last_viewed
         from_version_timestamp = best_last_viewed_timestamp if best_last_viewed_timestamp else dates[-2]
         from_version = request.args.get('from_version', from_version_timestamp )

         # Read as binary and force decode as UTF-8
         # Windows may fail decode in python if we just use 'r' mode (chardet decode exception)
         from_version = request.args.get('from_version')
         from_version_index = -2  # second newest
         if from_version and from_version in dates:
             from_version_index = dates.index(from_version)
         else:
             from_version = dates[from_version_index]
         # Use the current one if nothing was specified
         to_version = request.args.get('to_version', str(dates[-1]))

         try:
             from_version_file_contents = watch.get_history_snapshot(dates[from_version_index])
             to_version_file_contents = watch.get_history_snapshot(timestamp=to_version)
         except Exception as e:
             from_version_file_contents = f"Unable to read to-version at index {dates[from_version_index]}.\n"

         to_version = request.args.get('to_version')
         to_version_index = -1
         if to_version and to_version in dates:
             to_version_index = dates.index(to_version)
         else:
             to_version = dates[to_version_index]
             logger.error(f"Unable to read watch history to-version for version {to_version}: {str(e)}")
             to_version_file_contents = f"Unable to read to-version at {to_version}.\n"

         try:
             to_version_file_contents = watch.get_history_snapshot(dates[to_version_index])
             from_version_file_contents = watch.get_history_snapshot(timestamp=from_version)
         except Exception as e:
             to_version_file_contents = "Unable to read to-version at index{}.\n".format(dates[to_version_index])
             logger.error(f"Unable to read watch history from-version for version {from_version}: {str(e)}")
             from_version_file_contents = f"Unable to read to-version {from_version}.\n"

         screenshot_url = watch.get_screenshot()
@@ -163,7 +173,9 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
         if datastore.data['settings']['application'].get('password') or os.getenv("SALTED_PASS", False):
             password_enabled_and_share_is_off = not datastore.data['settings']['application'].get('shared_diff_access')

-        output = render_template("diff.html",
+        datastore.set_last_viewed(uuid, time.time())
+
+        return render_template("diff.html",
                                  current_diff_url=watch['url'],
                                  from_version=str(from_version),
                                  to_version=str(to_version),

@@ -186,7 +198,10 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
                                  watch_a=watch
                                  )

-        return output
+    @views_blueprint.route("/diff/<string:uuid>", methods=['GET'])
+    @login_optionally_required
+    def diff_history_page(uuid):
+        return _render_diff_template(uuid)

     @views_blueprint.route("/form/add/quickwatch", methods=['POST'])
     @login_optionally_required
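The from/to selection in the rebuilt `_render_diff_template` reduces to an index lookup over the snapshot timestamps: default to the second newest snapshot for "from" and the newest for "to", and honour an explicit timestamp only if it actually exists. A stripped-down sketch of just that logic:

```python
# Stripped-down restatement of the from/to version selection shown in the diff above.
def pick_versions(dates: list[str], from_version: str | None, to_version: str | None):
    # "from" defaults to the second newest snapshot, "to" to the newest.
    from_idx = dates.index(from_version) if from_version in dates else -2
    to_idx = dates.index(to_version) if to_version in dates else -1
    return dates[from_idx], dates[to_idx]

dates = ["1700000000", "1700000100", "1700000200"]
assert pick_versions(dates, None, None) == ("1700000100", "1700000200")
assert pick_versions(dates, "1700000000", None) == ("1700000000", "1700000200")
```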
@@ -4,6 +4,7 @@
 <script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
 <script src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script>
 <script>let nowtimeserver={{ now_time_server }};</script>
+<script>let favicon_baseURL="{{ url_for('static_content', group='favicon', filename="PLACEHOLDER")}}";</script>
 <script>
 // Initialize Feather icons after the page loads
 document.addEventListener('DOMContentLoaded', function() {
@@ -18,19 +19,20 @@ document.addEventListener('DOMContentLoaded', function() {
         transition: background-size 0.9s ease
     }
 </style>
-<div class="box">
+<div class="box" id="form-quick-watch-add">

     <form class="pure-form" action="{{ url_for('ui.ui_views.form_quick_watch_add', tag=active_tag_uuid) }}" method="POST" id="new-watch-form">
         <input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
         <fieldset>
-            <legend>Add a new change detection watch</legend>
+            <legend>Add a new web page change detection watch</legend>
             <div id="watch-add-wrapper-zone">

                 {{ render_nolabel_field(form.url, placeholder="https://...", required=true) }}
-                {{ render_nolabel_field(form.tags, value=active_tag.title if active_tag_uuid else '', placeholder="watch label / tag") }}
                 {{ render_nolabel_field(form.watch_submit_button, title="Watch this URL!" ) }}
                 {{ render_nolabel_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }}
             </div>
+            <div id="watch-group-tag">
+                {{ render_field(form.tags, value=active_tag.title if active_tag_uuid else '', placeholder="Watch group / tag", class="transparent-field") }}
+            </div>
             <div id="quick-watch-processor-type">
                 {{ render_simple_field(form.processor) }}
             </div>
@@ -38,7 +40,8 @@ document.addEventListener('DOMContentLoaded', function() {
         </fieldset>
         <span style="color:#eee; font-size: 80%;"><img alt="Create a shareable link" style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread-white.svg')}}" > Tip: You can also add 'shared' watches. <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Sharing-a-Watch">More info</a></span>
     </form>

 </div>
 <div class="box">
     <form class="pure-form" action="{{ url_for('ui.form_watch_list_checkbox_operations') }}" method="POST" id="watch-list-form">
         <input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
         <input type="hidden" id="op_extradata" name="op_extradata" value="" >
@@ -78,16 +81,23 @@ document.addEventListener('DOMContentLoaded', function() {
 {%- if any_has_restock_price_processor -%}
     {%- set cols_required = cols_required + 1 -%}
 {%- endif -%}
+{%- set ui_settings = datastore.data['settings']['application']['ui'] -%}

 <div id="watch-table-wrapper">

-    <table class="pure-table pure-table-striped watch-table">
+    {%- set table_classes = [
+        'favicon-enabled' if 'favicons_enabled' not in ui_settings or ui_settings['favicons_enabled'] else 'favicon-not-enabled',
+    ] -%}
+    <table class="pure-table pure-table-striped watch-table {{ table_classes | reject('equalto', '') | join(' ') }}">
     <thead>
     <tr>
         {%- set link_order = "desc" if sort_order == 'asc' else "asc" -%}
         {%- set arrow_span = "" -%}
         <th><input style="vertical-align: middle" type="checkbox" id="check-all" > <a class="{{ 'active '+link_order if sort_attribute == 'date_created' else 'inactive' }}" href="{{url_for('watchlist.index', sort='date_created', order=link_order, tag=active_tag_uuid)}}"># <span class='arrow {{link_order}}'></span></a></th>
+        <th class="empty-cell"></th>
         <th>
             <a class="{{ 'active '+link_order if sort_attribute == 'paused' else 'inactive' }}" href="{{url_for('watchlist.index', sort='paused', order=link_order, tag=active_tag_uuid)}}"><i data-feather="pause" style="vertical-align: bottom; width: 14px; height: 14px; margin-right: 4px;"></i><span class='arrow {{link_order}}'></span></a>

             <a class="{{ 'active '+link_order if sort_attribute == 'notification_muted' else 'inactive' }}" href="{{url_for('watchlist.index', sort='notification_muted', order=link_order, tag=active_tag_uuid)}}"><i data-feather="volume-2" style="vertical-align: bottom; width: 14px; height: 14px; margin-right: 4px;"></i><span class='arrow {{link_order}}'></span></a>
         </th>
         <th><a class="{{ 'active '+link_order if sort_attribute == 'label' else 'inactive' }}" href="{{url_for('watchlist.index', sort='label', order=link_order, tag=active_tag_uuid)}}">Website <span class='arrow {{link_order}}'></span></a></th>
         {%- if any_has_restock_price_processor -%}
         <th>Restock & Price</th>
@@ -103,9 +113,11 @@ document.addEventListener('DOMContentLoaded', function() {
             <td colspan="{{ cols_required }}" style="text-wrap: wrap;">No website watches configured, please add a URL in the box above, or <a href="{{ url_for('imports.import_page')}}" >import a list</a>.</td>
         </tr>
     {%- endif -%}

     {%- for watch in (watches|sort(attribute=sort_attribute, reverse=sort_order == 'asc'))|pagination_slice(skip=pagination.skip) -%}
         {%- set checking_now = is_checking_now(watch) -%}
         {%- set history_n = watch.history_n -%}
+        {%- set favicon = watch.get_favicon_filename() -%}
+        {# Mirror in changedetectionio/static/js/realtime.js for the frontend #}
         {%- set row_classes = [
             loop.cycle('pure-table-odd', 'pure-table-even'),
@@ -114,49 +126,63 @@ document.addEventListener('DOMContentLoaded', function() {
             'paused' if watch.paused is defined and watch.paused != False else '',
             'unviewed' if watch.has_unviewed else '',
             'has-restock-info' if watch.has_restock_info else 'no-restock-info',
             'has-favicon' if favicon else '',
             'in-stock' if watch.has_restock_info and watch['restock']['in_stock'] else '',
             'not-in-stock' if watch.has_restock_info and not watch['restock']['in_stock'] else '',
             'queued' if watch.uuid in queued_uuids else '',
             'checking-now' if checking_now else '',
             'notification_muted' if watch.notification_muted else '',
             'single-history' if history_n == 1 else '',
             'multiple-history' if history_n >= 2 else ''
             'multiple-history' if history_n >= 2 else '',
         ] -%}
         <tr id="{{ watch.uuid }}" data-watch-uuid="{{ watch.uuid }}" class="{{ row_classes | reject('equalto', '') | join(' ') }}">
             <td class="inline checkbox-uuid" ><input name="uuids" type="checkbox" value="{{ watch.uuid}} " > <span>{{ loop.index+pagination.skip }}</span></td>
             <td class="inline checkbox-uuid" ><div><input name="uuids" type="checkbox" value="{{ watch.uuid}} " > <span class="counter-i">{{ loop.index+pagination.skip }}</span></div></td>
             <td class="inline watch-controls">
                 <div>
                 <a class="ajax-op state-off pause-toggle" data-op="pause" href="{{url_for('watchlist.index', op='pause', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause checks" title="Pause checks" class="icon icon-pause" ></a>
                 <a class="ajax-op state-on pause-toggle" data-op="pause" style="display: none" href="{{url_for('watchlist.index', op='pause', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='play.svg')}}" alt="UnPause checks" title="UnPause checks" class="icon icon-unpause" ></a>
                 <a class="ajax-op state-off mute-toggle" data-op="mute" href="{{url_for('watchlist.index', op='mute', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notification" title="Mute notification" class="icon icon-mute" ></a>
                 <a class="ajax-op state-on mute-toggle" data-op="mute" style="display: none" href="{{url_for('watchlist.index', op='mute', uuid=watch.uuid, tag=active_tag_uuid)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="UnMute notification" title="UnMute notification" class="icon icon-mute" ></a>
                 </div>
             </td>
             <td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
                 <a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}"> </a>
                 <a class="link-spread" href="{{url_for('ui.form_share_put_watch', uuid=watch.uuid)}}"><img src="{{url_for('static_content', group='images', filename='spread.svg')}}" class="status-icon icon icon-spread" title="Create a link to share watch config with others" ></a>
                 {%- if watch.get_fetch_backend == "html_webdriver"
                      or ( watch.get_fetch_backend == "system" and system_default_fetcher == 'html_webdriver' )
                      or "extra_browser_" in watch.get_fetch_backend
                 -%}
                 <img class="status-icon" src="{{url_for('static_content', group='images', filename='google-chrome-icon.png')}}" alt="Using a Chrome browser" title="Using a Chrome browser" >
                 {%- endif -%}
             <td class="title-col inline">
                 <div class="flex-wrapper">
                     {% if 'favicons_enabled' not in ui_settings or ui_settings['favicons_enabled'] %}
                     <div>{# A page might have hundreds of these images, set IMG options for lazy loading, don't set SRC if we dont have it so it doesnt fetch the placeholder' #}
                         <img alt="Favicon thumbnail" class="favicon" loading="lazy" decoding="async" fetchpriority="low" {% if favicon %} src="{{url_for('static_content', group='favicon', filename=watch.uuid)}}" {% else %} src='data:image/svg+xml;utf8,%3Csvg xmlns="http://www.w3.org/2000/svg" width="7.087" height="7.087" viewBox="0 0 7.087 7.087"%3E%3Ccircle cx="3.543" cy="3.543" r="3.279" stroke="%23e1e1e1" stroke-width="0.45" fill="none" opacity="0.74"/%3E%3C/svg%3E' {% endif %} />
                     </div>
                     {% endif %}
                     <div>
                         <span class="watch-title">
                             {{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}} <a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}"> </a>
                         </span>
                         <div class="error-text" style="display:none;">{{ watch.compile_error_texts(has_proxies=datastore.proxy_list) }}</div>
                         {%- if watch['processor'] == 'text_json_diff' -%}
                             {%- if watch['has_ldjson_price_data'] and not watch['track_ldjson_price_data'] -%}
                             <div class="ldjson-price-track-offer">Switch to Restock & Price watch mode? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
                             {%- endif -%}
                         {%- endif -%}
                         {%- if watch['processor'] == 'restock_diff' -%}
                         <span class="tracking-ldjson-price-data" title="Automatically following embedded price information"><img src="{{url_for('static_content', group='images', filename='price-tag-icon.svg')}}" class="status-icon price-follow-tag-icon" > Price</span>
                         {%- endif -%}
                         {%- for watch_tag_uuid, watch_tag in datastore.get_all_tags_for_watch(watch['uuid']).items() -%}
                         <span class="watch-tag-list">{{ watch_tag.title }}</span>
                         {%- endfor -%}
                     </div>
                     <div class="status-icons">
                         <a class="link-spread" href="{{url_for('ui.form_share_put_watch', uuid=watch.uuid)}}"><img src="{{url_for('static_content', group='images', filename='spread.svg')}}" class="status-icon icon icon-spread" title="Create a link to share watch config with others" ></a>
                         {%- if watch.get_fetch_backend == "html_webdriver"
                              or ( watch.get_fetch_backend == "system" and system_default_fetcher == 'html_webdriver' )
                              or "extra_browser_" in watch.get_fetch_backend
                         -%}
                         <img class="status-icon" src="{{url_for('static_content', group='images', filename='google-chrome-icon.png')}}" alt="Using a Chrome browser" title="Using a Chrome browser" >
                         {%- endif -%}
                         {%- if watch.is_pdf -%}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" alt="Converting PDF to text" >{%- endif -%}
                         {%- if watch.has_browser_steps -%}<img class="status-icon status-browsersteps" src="{{url_for('static_content', group='images', filename='steps.svg')}}" alt="Browser Steps is enabled" >{%- endif -%}

                         {%- if watch.is_pdf -%}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" alt="Converting PDF to text" >{%- endif -%}
                         {%- if watch.has_browser_steps -%}<img class="status-icon status-browsersteps" src="{{url_for('static_content', group='images', filename='steps.svg')}}" alt="Browser Steps is enabled" >{%- endif -%}

                         <div class="error-text" style="display:none;">{{ watch.compile_error_texts(has_proxies=datastore.proxy_list)|safe }}</div>

                         {%- if watch['processor'] == 'text_json_diff' -%}
                             {%- if watch['has_ldjson_price_data'] and not watch['track_ldjson_price_data'] -%}
                             <div class="ldjson-price-track-offer">Switch to Restock & Price watch mode? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
                             {%- endif -%}
                         {%- endif -%}
                         {%- if watch['processor'] == 'restock_diff' -%}
                         <span class="tracking-ldjson-price-data" title="Automatically following embedded price information"><img src="{{url_for('static_content', group='images', filename='price-tag-icon.svg')}}" class="status-icon price-follow-tag-icon" > Price</span>
                         {%- endif -%}
                         {%- for watch_tag_uuid, watch_tag in datastore.get_all_tags_for_watch(watch['uuid']).items() -%}
                         <span class="watch-tag-list">{{ watch_tag.title }}</span>
                         {%- endfor -%}
                     </div>
                 </div>
             </td>
             {%- if any_has_restock_price_processor -%}
             <td class="restock-and-price">
@@ -193,13 +219,15 @@ document.addEventListener('DOMContentLoaded', function() {
                 Not yet
             {%- endif -%}
         </td>
-        <td>
+        <td class="buttons">
+            <div>
             {%- set target_attr = ' target="' ~ watch.uuid ~ '"' if datastore.data['settings']['application']['ui'].get('open_diff_in_new_tab') else '' -%}
             <a href="" class="already-in-queue-button recheck pure-button pure-button-primary" style="display: none;" disabled="disabled">Queued</a>
             <a href="{{ url_for('ui.form_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}" data-op='recheck' class="ajax-op recheck pure-button pure-button-primary">Recheck</a>
             <a href="{{ url_for('ui.ui_edit.edit_page', uuid=watch.uuid, tag=active_tag_uuid)}}#general" class="pure-button pure-button-primary">Edit</a>
             <a href="{{ url_for('ui.ui_views.diff_history_page', uuid=watch.uuid)}}" {{target_attr}} class="pure-button pure-button-primary history-link" style="display: none;">History</a>
             <a href="{{ url_for('ui.ui_views.preview_page', uuid=watch.uuid)}}" {{target_attr}} class="pure-button pure-button-primary preview-link" style="display: none;">Preview</a>
+            </div>
         </td>
     </tr>
     {%- endfor -%}
@@ -212,9 +240,14 @@ document.addEventListener('DOMContentLoaded', function() {
         <li id="post-list-mark-views" class="{%- if has_unviewed -%}has-unviewed{%- endif -%}" style="display: none;" >
             <a href="{{url_for('ui.mark_all_viewed',with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag " id="mark-all-viewed">Mark all viewed</a>
         </li>
+        {%- if active_tag_uuid -%}
+        <li id="post-list-mark-views-tag">
+            <a href="{{url_for('ui.mark_all_viewed', tag=active_tag_uuid) }}" class="pure-button button-tag " id="mark-all-viewed">Mark all viewed in '{{active_tag.title}}'</a>
+        </li>
+        {%- endif -%}
         <li>
             <a href="{{ url_for('ui.form_watch_checknow', tag=active_tag_uuid, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag" id="recheck-all">Recheck
-            all {%- if active_tag_uuid-%} in "{{active_tag.title}}"{%endif%}</a>
+            all {% if active_tag_uuid %} in '{{active_tag.title}}'{%endif%}</a>
         </li>
         <li>
             <a href="{{ url_for('rss.feed', tag=active_tag_uuid, token=app_rss_token)}}"><img alt="RSS Feed" id="feed-icon" src="{{url_for('static_content', group='images', filename='generic_feed-icon.svg')}}" height="15"></a>
@@ -1,5 +1,3 @@
 from flask import Blueprint

 from json_logic.builtins import BUILTINS

 from .exceptions import EmptyConditionRuleRowNotUsable

@@ -16,7 +14,6 @@ operator_choices = [
     ("==", "Equals"),
     ("!=", "Not Equals"),
     ("in", "Contains"),
-    ("!in", "Does Not Contain"),
 ]

 # Fields available in the rules
@@ -21,17 +21,21 @@ def register_operators():
     def length_max(_, text, strlen):
         return len(text) <= int(strlen)

-    # ✅ Custom function for case-insensitive regex matching
+    # Custom function for case-insensitive regex matching
     def contains_regex(_, text, pattern):
         """Returns True if `text` contains `pattern` (case-insensitive regex match)."""
         return bool(re.search(pattern, str(text), re.IGNORECASE))

-    # ✅ Custom function for NOT matching case-insensitive regex
+    # Custom function for NOT matching case-insensitive regex
     def not_contains_regex(_, text, pattern):
         """Returns True if `text` does NOT contain `pattern` (case-insensitive regex match)."""
         return not bool(re.search(pattern, str(text), re.IGNORECASE))

+    def not_contains(_, text, pattern):
+        return not pattern in text
+
     return {
+        "!in": not_contains,
         "!contains_regex": not_contains_regex,
         "contains_regex": contains_regex,
         "ends_with": ends_with,

@@ -43,6 +47,7 @@ def register_operators():
 @hookimpl
 def register_operator_choices():
     return [
+        ("!in", "Does NOT Contain"),
         ("starts_with", "Text Starts With"),
         ("ends_with", "Text Ends With"),
         ("length_min", "Length minimum"),
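A standalone restatement of the two regex operators registered above, just to make their semantics concrete; the unused first argument mirrors the json-logic operator calling convention:

```python
# Restatement of the regex operators from the plugin above, for illustration only.
import re

def contains_regex(_, text, pattern):
    return bool(re.search(pattern, str(text), re.IGNORECASE))

def not_contains_regex(_, text, pattern):
    return not bool(re.search(pattern, str(text), re.IGNORECASE))

assert contains_regex(None, "Price: 12.99 EUR", r"\d+\.\d{2}")   # matches "12.99"
assert contains_regex(None, "Special OFFER today", r"offer")     # case-insensitive
assert not_contains_regex(None, "Out of stock", r"\bin stock\b") # pattern absent
```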
@@ -1,6 +1,8 @@
 import pluggy
 from loguru import logger

+LEVENSHTEIN_MAX_LEN_FOR_EDIT_STATS=100000
+
 # Support both plugin systems
 conditions_hookimpl = pluggy.HookimplMarker("changedetectionio_conditions")
 global_hookimpl = pluggy.HookimplMarker("changedetectionio")

@@ -72,7 +74,17 @@ def ui_edit_stats_extras(watch):
     """Generate the HTML for Levenshtein stats - shared by both plugin systems"""
     if len(watch.history.keys()) < 2:
         return "<p>Not enough history to calculate Levenshtein metrics</p>"

+    # Protection against the algorithm getting stuck on huge documents
+    k = list(watch.history.keys())
+    if any(
+            len(watch.get_history_snapshot(timestamp=k[idx])) > LEVENSHTEIN_MAX_LEN_FOR_EDIT_STATS
+            for idx in (-1, -2)
+            if len(k) >= abs(idx)
+    ):
+        return "<p>Snapshot too large for edit statistics, skipping.</p>"
+
     try:
         lev_data = levenshtein_ratio_recent_history(watch)
         if not lev_data or not isinstance(lev_data, dict):
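For context, the guard above avoids running edit-distance maths over very large snapshots, since Levenshtein distance is roughly O(n*m) in the two snapshot lengths. A hedged sketch of the kind of metric involved, assuming the python-Levenshtein package; the project's actual `levenshtein_ratio_recent_history()` may differ:

```python
# Hedged sketch: compare the two most recent snapshots, but only when both
# are under LEVENSHTEIN_MAX_LEN_FOR_EDIT_STATS characters. Assumes python-Levenshtein.
import Levenshtein

LEVENSHTEIN_MAX_LEN_FOR_EDIT_STATS = 100000

def edit_stats(older: str, newer: str) -> dict | None:
    if max(len(older), len(newer)) > LEVENSHTEIN_MAX_LEN_FOR_EDIT_STATS:
        return None  # snapshot too large, skip the expensive comparison
    return {
        "distance": Levenshtein.distance(older, newer),
        "ratio": Levenshtein.ratio(older, newer),
    }
```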
@@ -28,6 +28,7 @@ from changedetectionio.content_fetchers.requests import fetcher as html_requests
 import importlib.resources
 XPATH_ELEMENT_JS = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('xpath_element_scraper.js').read_text(encoding='utf-8')
 INSTOCK_DATA_JS = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('stock-not-in-stock.js').read_text(encoding='utf-8')
+FAVICON_FETCHER_JS = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('favicon-fetcher.js').read_text(encoding='utf-8')


 def available_fetchers():
@@ -48,6 +48,7 @@ class Fetcher():
     error = None
     fetcher_description = "No description"
     headers = {}
+    favicon_blob = None
     instock_data = None
     instock_data_js = ""
     status_code = None
@@ -5,7 +5,7 @@ from urllib.parse import urlparse
 from loguru import logger

 from changedetectionio.content_fetchers import SCREENSHOT_MAX_HEIGHT_DEFAULT, visualselector_xpath_selectors, \
-    SCREENSHOT_SIZE_STITCH_THRESHOLD, SCREENSHOT_MAX_TOTAL_HEIGHT, XPATH_ELEMENT_JS, INSTOCK_DATA_JS
+    SCREENSHOT_SIZE_STITCH_THRESHOLD, SCREENSHOT_MAX_TOTAL_HEIGHT, XPATH_ELEMENT_JS, INSTOCK_DATA_JS, FAVICON_FETCHER_JS
 from changedetectionio.content_fetchers.base import Fetcher, manage_user_agent
 from changedetectionio.content_fetchers.exceptions import PageUnloadable, Non200ErrorCodeReceived, EmptyReply, ScreenshotUnavailable

@@ -234,6 +234,12 @@ class fetcher(Fetcher):
                 await browser.close()
                 raise PageUnloadable(url=url, status_code=None, message=str(e))

+            try:
+                self.favicon_blob = await self.page.evaluate(FAVICON_FETCHER_JS)
+                await self.page.request_gc()
+            except Exception as e:
+                logger.error(f"Error fetching FavIcon info {str(e)}, continuing.")
+
             if self.status_code != 200 and not ignore_status_codes:
                 screenshot = await capture_full_page_async(self.page)
                 raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, screenshot=screenshot)

@@ -274,6 +280,7 @@ class fetcher(Fetcher):
             await self.page.request_gc()
             logger.debug(f"Scrape xPath element data in browser done in {time.time() - now:.2f}s")

             # Bug 3 in Playwright screenshot handling
             # Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it
             # JPEG is better here because the screenshots can be very very large
@@ -8,7 +8,7 @@ from loguru import logger

 from changedetectionio.content_fetchers import SCREENSHOT_MAX_HEIGHT_DEFAULT, visualselector_xpath_selectors, \
     SCREENSHOT_SIZE_STITCH_THRESHOLD, SCREENSHOT_DEFAULT_QUALITY, XPATH_ELEMENT_JS, INSTOCK_DATA_JS, \
-    SCREENSHOT_MAX_TOTAL_HEIGHT
+    SCREENSHOT_MAX_TOTAL_HEIGHT, FAVICON_FETCHER_JS
 from changedetectionio.content_fetchers.base import Fetcher, manage_user_agent
 from changedetectionio.content_fetchers.exceptions import PageUnloadable, Non200ErrorCodeReceived, EmptyReply, BrowserFetchTimedOut, \
     BrowserConnectError

@@ -179,10 +179,8 @@ class fetcher(Fetcher):
         except Exception as e:
             raise BrowserConnectError(msg=f"Error connecting to the browser - Exception '{str(e)}'")

-        # Better is to launch chrome with the URL as arg
-        # non-headless - newPage() will launch an extra tab/window, .browser should already contain 1 page/tab
-        # headless - ask a new page
-        self.page = (pages := await browser.pages) and len(pages) or await browser.newPage()
+        # more reliable is to just request a new page
+        self.page = await browser.newPage()

         if '--window-size' in self.browser_connection_url:
             # Be sure the viewport is always the window-size, this is often not the same thing

@@ -292,6 +290,11 @@ class fetcher(Fetcher):
             await browser.close()
             raise PageUnloadable(url=url, status_code=None, message=str(e))

+        try:
+            self.favicon_blob = await self.page.evaluate(FAVICON_FETCHER_JS)
+        except Exception as e:
+            logger.error(f"Error fetching FavIcon info {str(e)}, continuing.")
+
         if self.status_code != 200 and not ignore_status_codes:
             screenshot = await capture_full_page(page=self.page)
changedetectionio/content_fetchers/res/favicon-fetcher.js (new file, 79 lines)

@@ -0,0 +1,79 @@
(async () => {
    const links = Array.from(document.querySelectorAll(
        'link[rel~="apple-touch-icon"], link[rel~="icon"]'
    ));

    const icons = links.map(link => {
        const sizesStr = link.getAttribute('sizes');
        let size = 0;
        if (sizesStr) {
            const [w] = sizesStr.split('x').map(Number);
            if (!isNaN(w)) size = w;
        } else {
            size = 16;
        }
        return {
            size,
            rel: link.getAttribute('rel'),
            href: link.href
        };
    });

    // If no icons found, add fallback favicon.ico
    if (icons.length === 0) {
        icons.push({
            size: 16,
            rel: 'icon',
            href: '/favicon.ico'
        });
    }

    // sort preference
    icons.sort((a, b) => {
        const isAppleA = /apple-touch-icon/.test(a.rel);
        const isAppleB = /apple-touch-icon/.test(b.rel);
        if (isAppleA && !isAppleB) return -1;
        if (!isAppleA && isAppleB) return 1;
        return b.size - a.size;
    });

    const timeoutMs = 2000;

    for (const icon of icons) {
        try {
            const controller = new AbortController();
            const timeout = setTimeout(() => controller.abort(), timeoutMs);

            const resp = await fetch(icon.href, {
                signal: controller.signal,
                redirect: 'follow'
            });

            clearTimeout(timeout);

            if (!resp.ok) {
                continue;
            }

            const blob = await resp.blob();

            // Convert blob to base64
            const reader = new FileReader();
            return await new Promise(resolve => {
                reader.onloadend = () => {
                    resolve({
                        url: icon.href,
                        base64: reader.result.split(",")[1]
                    });
                };
                reader.readAsDataURL(blob);
            });

        } catch (e) {
            continue;
        }
    }

    // nothing found
    return null;
})();
@@ -17,6 +17,7 @@ async () => {
         'back in stock soon',
         'back-order or out of stock',
         'backordered',
         'backorder',
         'benachrichtigt mich', // notify me
         'binnenkort leverbaar', // coming soon
         'brak na stanie',

@@ -39,6 +40,7 @@ async () => {
         'mail me when available',
         'message if back in stock',
         'mevcut değil',
         'more on order',
         'nachricht bei',
         'nicht auf lager',
         'nicht lagernd',
@@ -12,19 +12,17 @@ from blinker import signal

 from changedetectionio.strtobool import strtobool
 from threading import Event
-from changedetectionio.custom_queue import SignalPriorityQueue, AsyncSignalPriorityQueue, NotificationQueue
+from changedetectionio.queue_handlers import RecheckPriorityQueue, NotificationQueue
 from changedetectionio import worker_handler

 from flask import (
     Flask,
     abort,
     flash,
     make_response,
     redirect,
     render_template,
     request,
     send_from_directory,
     session,
     url_for,
 )
 from flask_compress import Compress as FlaskCompress
@@ -40,7 +38,7 @@ from loguru import logger
|
||||
|
||||
from changedetectionio import __version__
|
||||
from changedetectionio import queuedWatchMetaData
|
||||
from changedetectionio.api import Watch, WatchHistory, WatchSingleHistory, CreateWatch, Import, SystemInfo, Tag, Tags, Notifications
|
||||
from changedetectionio.api import Watch, WatchHistory, WatchSingleHistory, CreateWatch, Import, SystemInfo, Tag, Tags, Notifications, WatchFavicon
|
||||
from changedetectionio.api.Search import Search
|
||||
from .time_handler import is_within_schedule
|
||||
|
||||
@@ -50,8 +48,8 @@ datastore = None
|
||||
ticker_thread = None
|
||||
extra_stylesheets = []
|
||||
|
||||
# Use async queue by default, keep sync for backward compatibility
|
||||
update_q = AsyncSignalPriorityQueue() if worker_handler.USE_ASYNC_WORKERS else SignalPriorityQueue()
|
||||
# Use bulletproof janus-based queues for sync/async reliability
|
||||
update_q = RecheckPriorityQueue()
|
||||
notification_q = NotificationQueue()
|
||||
MAX_QUEUE_SIZE = 2000
|
||||
|
||||
@@ -100,7 +98,7 @@ watch_api = Api(app, decorators=[csrf.exempt])
|
||||
def init_app_secret(datastore_path):
|
||||
secret = ""
|
||||
|
||||
path = "{}/secret.txt".format(datastore_path)
|
||||
path = os.path.join(datastore_path, "secret.txt")
|
||||
|
||||
try:
|
||||
with open(path, "r") as f:
|
||||
@@ -307,7 +305,9 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
watch_api.add_resource(WatchSingleHistory,
|
||||
'/api/v1/watch/<string:uuid>/history/<string:timestamp>',
|
||||
resource_class_kwargs={'datastore': datastore, 'update_q': update_q})
|
||||
|
||||
watch_api.add_resource(WatchFavicon,
|
||||
'/api/v1/watch/<string:uuid>/favicon',
|
||||
resource_class_kwargs={'datastore': datastore})
|
||||
watch_api.add_resource(WatchHistory,
|
||||
'/api/v1/watch/<string:uuid>/history',
|
||||
resource_class_kwargs={'datastore': datastore})
|
||||
@@ -427,6 +427,32 @@ def changedetection_app(config=None, datastore_o=None):
|
||||
except FileNotFoundError:
|
||||
abort(404)
|
||||
|
||||
if group == 'favicon':
|
||||
# Could be sensitive, follow password requirements
|
||||
if datastore.data['settings']['application']['password'] and not flask_login.current_user.is_authenticated:
|
||||
abort(403)
|
||||
# Get the watch object
|
||||
watch = datastore.data['watching'].get(filename)
|
||||
if not watch:
|
||||
abort(404)
|
||||
|
||||
favicon_filename = watch.get_favicon_filename()
|
||||
if favicon_filename:
|
||||
try:
|
||||
import magic
|
||||
mime = magic.from_file(
|
||||
os.path.join(watch.watch_data_dir, favicon_filename),
|
||||
mime=True
|
||||
)
|
||||
except ImportError:
|
||||
# Fallback, no python-magic
|
||||
import mimetypes
|
||||
mime, encoding = mimetypes.guess_type(favicon_filename)
|
||||
|
||||
response = make_response(send_from_directory(watch.watch_data_dir, favicon_filename))
|
||||
response.headers['Content-type'] = mime
|
||||
response.headers['Cache-Control'] = 'max-age=300, must-revalidate' # Cache for 5 minutes, then revalidate
|
||||
return response
|
||||
|
||||
if group == 'visual_selector_data':
|
||||
# Could be sensitive, follow password requirements
|
||||
@@ -818,16 +844,22 @@ def ticker_thread_check_time_launch_checks():
|
||||
|
||||
# Use Epoch time as priority, so we get a "sorted" PriorityQueue, but we can still push a priority 1 into it.
|
||||
priority = int(time.time())
|
||||
logger.debug(
|
||||
f"> Queued watch UUID {uuid} "
|
||||
f"last checked at {watch['last_checked']} "
|
||||
f"queued at {now:0.2f} priority {priority} "
|
||||
f"jitter {watch.jitter_seconds:0.2f}s, "
|
||||
f"{now - watch['last_checked']:0.2f}s since last checked")
|
||||
|
||||
# Into the queue with you
|
||||
worker_handler.queue_item_async_safe(update_q, queuedWatchMetaData.PrioritizedItem(priority=priority, item={'uuid': uuid}))
|
||||
|
||||
queued_successfully = worker_handler.queue_item_async_safe(update_q,
|
||||
queuedWatchMetaData.PrioritizedItem(priority=priority,
|
||||
item={'uuid': uuid})
|
||||
)
|
||||
if queued_successfully:
|
||||
logger.debug(
|
||||
f"> Queued watch UUID {uuid} "
|
||||
f"last checked at {watch['last_checked']} "
|
||||
f"queued at {now:0.2f} priority {priority} "
|
||||
f"jitter {watch.jitter_seconds:0.2f}s, "
|
||||
f"{now - watch['last_checked']:0.2f}s since last checked")
|
||||
else:
|
||||
logger.critical(f"CRITICAL: Failed to queue watch UUID {uuid} in ticker thread!")
|
||||
|
||||
# Reset for next time
|
||||
watch.jitter_seconds = 0
|
||||
|
||||
|
||||
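With the WatchFavicon resource registered above, the scraped favicon can also be fetched over the REST API rather than only through the logged-in UI route. A rough usage sketch, assuming a local instance on port 5000 and that the usual x-api-key header is configured (adjust both to your setup):

    # Sketch only - the endpoint path comes from the add_resource() call above.
    import requests

    API_KEY = "your-api-key-here"     # assumption: API access enabled in settings
    UUID = "your-watch-uuid-here"

    r = requests.get(f"http://localhost:5000/api/v1/watch/{UUID}/favicon",
                     headers={"x-api-key": API_KEY}, timeout=10)
    if r.ok:
        # Content-Type is worked out server side (python-magic, falling back to mimetypes)
        with open("favicon.bin", "wb") as f:
            f.write(r.content)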
@@ -396,6 +396,19 @@ def validate_url(test_url):
# This should be wtforms.validators.
raise ValidationError('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX or incorrect URL format')


class ValidateSinglePythonRegexString(object):
def __init__(self, message=None):
self.message = message

def __call__(self, form, field):
try:
re.compile(field.data)
except re.error:
message = field.gettext('RegEx \'%s\' is not a valid regular expression.')
raise ValidationError(message % (field.data))


class ValidateListRegex(object):
"""
Validates that anything that looks like a regex passes as a regex
@@ -414,6 +427,7 @@ class ValidateListRegex(object):
message = field.gettext('RegEx \'%s\' is not a valid regular expression.')
raise ValidationError(message % (line))


class ValidateCSSJSONXPATHInput(object):
"""
Filter validation
@@ -738,8 +752,9 @@ class globalSettingsRequestForm(Form):
return False

class globalSettingsApplicationUIForm(Form):
open_diff_in_new_tab = BooleanField('Open diff page in a new tab', default=True, validators=[validators.Optional()])
open_diff_in_new_tab = BooleanField("Open 'History' page in a new tab", default=True, validators=[validators.Optional()])
socket_io_enabled = BooleanField('Realtime UI Updates Enabled', default=True, validators=[validators.Optional()])
favicons_enabled = BooleanField('Favicons Enabled', default=True, validators=[validators.Optional()])

# datastore.data['settings']['application']..
class globalSettingsApplicationForm(commonSettingsForm):
@@ -790,5 +805,5 @@ class globalSettingsForm(Form):


class extractDataForm(Form):
extract_regex = StringField('RegEx to extract', validators=[validators.Length(min=1, message="Needs a RegEx")])
extract_regex = StringField('RegEx to extract', validators=[validators.DataRequired(), ValidateSinglePythonRegexString()])
extract_submit_button = SubmitField('Extract as CSV', render_kw={"class": "pure-button pure-button-primary"})
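ValidateSinglePythonRegexString (and the reworked extract_regex field) lean entirely on re.compile() as the validity check, which is the simplest reliable test Python offers. A standalone illustration of the behaviour the validator wraps:

    # Sketch only - the form validator raises ValidationError where this prints INVALID.
    import re

    for candidate in (r"price:\s*(\d+)", r"(unclosed"):
        try:
            re.compile(candidate)
            print(f"OK      {candidate}")
        except re.error as e:
            print(f"INVALID {candidate} -> {e}")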
@@ -62,7 +62,8 @@ class model(dict):
'timezone': None, # Default IANA timezone name
'ui': {
'open_diff_in_new_tab': True,
'socket_io_enabled': True
'socket_io_enabled': True,
'favicons_enabled': True
},
}
}
@@ -8,6 +8,7 @@ import re
from pathlib import Path
from loguru import logger

from .. import safe_jinja
from ..html_tools import TRANSLATE_WHITESPACE_TABLE

# Allowable protocols, protects against javascript: etc
@@ -102,6 +103,13 @@ class model(watch_base):
return 'DISABLED'
return ready_url

@property
def domain_only_from_link(self):
from urllib.parse import urlparse
parsed = urlparse(self.link)
domain = parsed.hostname
return domain

def clear_watch(self):
import pathlib

@@ -412,6 +420,132 @@ class model(watch_base):
# False is not an option for AppRise, must be type None
return None

def bump_favicon(self, url, favicon_base_64: str) -> None:
from urllib.parse import urlparse
import base64
import binascii
decoded = None

if url:
try:
parsed = urlparse(url)
filename = os.path.basename(parsed.path)
(base, extension) = filename.lower().strip().rsplit('.', 1)
except ValueError:
logger.error(f"UUID: {self.get('uuid')} Cant work out file extension from '{url}'")
return None
else:
# Assume favicon.ico
base = "favicon"
extension = "ico"

fname = os.path.join(self.watch_data_dir, f"favicon.{extension}")

try:
# validate=True makes sure the string only contains valid base64 chars
decoded = base64.b64decode(favicon_base_64, validate=True)
except (binascii.Error, ValueError) as e:
logger.warning(f"UUID: {self.get('uuid')} FavIcon save data (Base64) corrupt? {str(e)}")
else:
if decoded:
try:
with open(fname, 'wb') as f:
f.write(decoded)
# A signal that could trigger the socket server to update the browser also
watch_check_update = signal('watch_favicon_bump')
if watch_check_update:
watch_check_update.send(watch_uuid=self.get('uuid'))

except Exception as e:
logger.warning(f"UUID: {self.get('uuid')} error saving FavIcon to {fname} - {str(e)}")

# @todo - Store some checksum and only write when its different
logger.debug(f"UUID: {self.get('uuid')} updated favicon to at {fname}")

def get_favicon_filename(self) -> str | None:
"""
Find any favicon.* file in the current working directory
and return the contents of the newest one.

Returns:
bytes: Contents of the newest favicon file, or None if not found.
"""
import glob

# Search for all favicon.* files
files = glob.glob(os.path.join(self.watch_data_dir, "favicon.*"))

if not files:
return None

# Find the newest by modification time
newest_file = max(files, key=os.path.getmtime)
return os.path.basename(newest_file)

def get_screenshot_as_thumbnail(self, max_age=3200):
"""Return path to a square thumbnail of the most recent screenshot.

Creates a 150x150 pixel thumbnail from the top portion of the screenshot.

Args:
max_age: Maximum age in seconds before recreating thumbnail

Returns:
Path to thumbnail or None if no screenshot exists
"""
import os
import time

thumbnail_path = os.path.join(self.watch_data_dir, "thumbnail.jpeg")
top_trim = 500 # Pixels from top of screenshot to use

screenshot_path = self.get_screenshot()
if not screenshot_path:
return None

# Reuse thumbnail if it's fresh and screenshot hasn't changed
if os.path.isfile(thumbnail_path):
thumbnail_mtime = os.path.getmtime(thumbnail_path)
screenshot_mtime = os.path.getmtime(screenshot_path)

if screenshot_mtime <= thumbnail_mtime and time.time() - thumbnail_mtime < max_age:
return thumbnail_path

try:
from PIL import Image

with Image.open(screenshot_path) as img:
# Crop top portion first (full width, top_trim height)
top_crop_height = min(top_trim, img.height)
img = img.crop((0, 0, img.width, top_crop_height))

# Create a smaller intermediate image (to reduce memory usage)
aspect = img.width / img.height
interim_width = min(top_trim, img.width)
interim_height = int(interim_width / aspect) if aspect > 0 else top_trim
img = img.resize((interim_width, interim_height), Image.NEAREST)

# Convert to RGB if needed
if img.mode != 'RGB':
img = img.convert('RGB')

# Crop to square from top center
square_size = min(img.width, img.height)
left = (img.width - square_size) // 2
img = img.crop((left, 0, left + square_size, square_size))

# Final resize to exact thumbnail size with better filter
img = img.resize((350, 350), Image.BILINEAR)

# Save with optimized settings
img.save(thumbnail_path, "JPEG", quality=75, optimize=True)

return thumbnail_path

except Exception as e:
logger.error(f"Error creating thumbnail for {self.get('uuid')}: {str(e)}")
return None

def __get_file_ctime(self, filename):
fname = os.path.join(self.watch_data_dir, filename)
if os.path.isfile(fname):
@@ -505,7 +639,7 @@ class model(watch_base):
if res:
if not csv_writer:
# A file on the disk can be transferred much faster via flask than a string reply
csv_output_filename = 'report.csv'
csv_output_filename = f"report-{self.get('uuid')}.csv"
f = open(os.path.join(self.watch_data_dir, csv_output_filename), 'w')
# @todo some headers in the future
#fieldnames = ['Epoch seconds', 'Date']
@@ -691,11 +825,11 @@ class model(watch_base):
output.append(str(Markup(f"<div class=\"notification-error\"><a href=\"{url_for('settings.notification_logs')}\">{ self.get('last_notification_error') }</a></div>")))

else:
# Lo_Fi version
# Lo_Fi version - no app context, cant rely on Jinja2 Markup
if last_error:
output.append(str(Markup(last_error)))
output.append(safe_jinja.render_fully_escaped(last_error))
if self.get('last_notification_error'):
output.append(str(Markup(self.get('last_notification_error'))))
output.append(safe_jinja.render_fully_escaped(self.get('last_notification_error')))

res = "\n".join(output)
return res
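Note that the only validation bump_favicon() applies to the payload itself is base64 strictness (validate=True); the decoded bytes are written as-is and never inspected as an image. A small standalone look at what that guard accepts and rejects:

    # Sketch only - mirrors the decode guard used in bump_favicon() above.
    import base64
    import binascii

    good = base64.b64encode(b"\x00icon-bytes").decode()
    for payload in (good, "this is !!! not base64"):
        try:
            decoded = base64.b64decode(payload, validate=True)
            print("accepted", len(decoded), "bytes")
        except (binascii.Error, ValueError) as e:
            print("rejected:", e)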
@@ -3,6 +3,7 @@ import uuid

from changedetectionio import strtobool
default_notification_format_for_watch = 'System default'
CONDITIONS_MATCH_LOGIC_DEFAULT = 'ALL'

class watch_base(dict):

@@ -15,6 +16,8 @@ class watch_base(dict):
'body': None,
'browser_steps': [],
'browser_steps_last_error_step': None,
'conditions' : {},
'conditions_match_logic': CONDITIONS_MATCH_LOGIC_DEFAULT,
'check_count': 0,
'check_unique_lines': False, # On change-detected, compare against all history if its something new
'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
430 changedetectionio/queue_handlers.py (new file)
@@ -0,0 +1,430 @@
import heapq
import threading
from typing import Dict, List, Any, Optional
from blinker import signal
from loguru import logger

try:
    import janus
except ImportError:
    logger.critical("CRITICAL: janus library is required. Install with: pip install janus")
    raise


class RecheckPriorityQueue:
    """
    Ultra-reliable priority queue using janus for async/sync bridging.

    CRITICAL DESIGN NOTE: Both sync_q and async_q are required because:
    - sync_q: Used by Flask routes, ticker threads, and other synchronous code
    - async_q: Used by async workers (the actual fetchers/processors) and coroutines

    DO NOT REMOVE EITHER INTERFACE - they bridge different execution contexts:
    - Synchronous code (Flask, threads) cannot use async methods without blocking
    - Async code cannot use sync methods without blocking the event loop
    - janus provides the only safe bridge between these two worlds

    Attempting to unify to async-only would require:
    - Converting all Flask routes to async (major breaking change)
    - Using asyncio.run() in sync contexts (causes deadlocks)
    - Thread-pool wrapping (adds complexity and overhead)

    Minimal implementation focused on reliability:
    - Pure janus for sync/async bridge
    - Thread-safe priority ordering
    - Bulletproof error handling with critical logging
    """

    def __init__(self, maxsize: int = 0):
        try:
            self._janus_queue = janus.Queue(maxsize=maxsize)
            # BOTH interfaces required - see class docstring for why
            self.sync_q = self._janus_queue.sync_q    # Flask routes, ticker thread
            self.async_q = self._janus_queue.async_q  # Async workers

            # Priority storage - thread-safe
            self._priority_items = []
            self._lock = threading.RLock()

            # Signals for UI updates
            self.queue_length_signal = signal('queue_length')

            logger.debug("RecheckPriorityQueue initialized successfully")
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to initialize RecheckPriorityQueue: {e}")
            raise

    # SYNC INTERFACE (for ticker thread)
    def put(self, item, block: bool = True, timeout: Optional[float] = None):
        """Thread-safe sync put with priority ordering"""
        try:
            # Add to priority storage
            with self._lock:
                heapq.heappush(self._priority_items, item)

            # Notify via janus sync queue
            self.sync_q.put(True, block=block, timeout=timeout)

            # Emit signals
            self._emit_put_signals(item)

            logger.debug(f"Successfully queued item: {self._get_item_uuid(item)}")
            return True

        except Exception as e:
            logger.critical(f"CRITICAL: Failed to put item {self._get_item_uuid(item)}: {e}")
            # Remove from priority storage if janus put failed
            try:
                with self._lock:
                    if item in self._priority_items:
                        self._priority_items.remove(item)
                        heapq.heapify(self._priority_items)
            except Exception as cleanup_e:
                logger.critical(f"CRITICAL: Failed to cleanup after put failure: {cleanup_e}")
            return False

    def get(self, block: bool = True, timeout: Optional[float] = None):
        """Thread-safe sync get with priority ordering"""
        try:
            # Wait for notification
            self.sync_q.get(block=block, timeout=timeout)

            # Get highest priority item
            with self._lock:
                if not self._priority_items:
                    logger.critical("CRITICAL: Queue notification received but no priority items available")
                    raise Exception("Priority queue inconsistency")
                item = heapq.heappop(self._priority_items)

            # Emit signals
            self._emit_get_signals()

            logger.debug(f"Successfully retrieved item: {self._get_item_uuid(item)}")
            return item

        except Exception as e:
            logger.critical(f"CRITICAL: Failed to get item from queue: {e}")
            raise

    # ASYNC INTERFACE (for workers)
    async def async_put(self, item):
        """Pure async put with priority ordering"""
        try:
            # Add to priority storage
            with self._lock:
                heapq.heappush(self._priority_items, item)

            # Notify via janus async queue
            await self.async_q.put(True)

            # Emit signals
            self._emit_put_signals(item)

            logger.debug(f"Successfully async queued item: {self._get_item_uuid(item)}")
            return True

        except Exception as e:
            logger.critical(f"CRITICAL: Failed to async put item {self._get_item_uuid(item)}: {e}")
            # Remove from priority storage if janus put failed
            try:
                with self._lock:
                    if item in self._priority_items:
                        self._priority_items.remove(item)
                        heapq.heapify(self._priority_items)
            except Exception as cleanup_e:
                logger.critical(f"CRITICAL: Failed to cleanup after async put failure: {cleanup_e}")
            return False

    async def async_get(self):
        """Pure async get with priority ordering"""
        try:
            # Wait for notification
            await self.async_q.get()

            # Get highest priority item
            with self._lock:
                if not self._priority_items:
                    logger.critical("CRITICAL: Async queue notification received but no priority items available")
                    raise Exception("Priority queue inconsistency")
                item = heapq.heappop(self._priority_items)

            # Emit signals
            self._emit_get_signals()

            logger.debug(f"Successfully async retrieved item: {self._get_item_uuid(item)}")
            return item

        except Exception as e:
            logger.critical(f"CRITICAL: Failed to async get item from queue: {e}")
            raise

    # UTILITY METHODS
    def qsize(self) -> int:
        """Get current queue size"""
        try:
            with self._lock:
                return len(self._priority_items)
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to get queue size: {e}")
            return 0

    def empty(self) -> bool:
        """Check if queue is empty"""
        return self.qsize() == 0

    def close(self):
        """Close the janus queue"""
        try:
            self._janus_queue.close()
            logger.debug("RecheckPriorityQueue closed successfully")
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to close RecheckPriorityQueue: {e}")

    # COMPATIBILITY METHODS (from original implementation)
    @property
    def queue(self):
        """Provide compatibility with original queue access"""
        try:
            with self._lock:
                return list(self._priority_items)
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to get queue list: {e}")
            return []

    def get_uuid_position(self, target_uuid: str) -> Dict[str, Any]:
        """Find position of UUID in queue"""
        try:
            with self._lock:
                queue_list = list(self._priority_items)
                total_items = len(queue_list)

                if total_items == 0:
                    return {'position': None, 'total_items': 0, 'priority': None, 'found': False}

                # Find target item
                for item in queue_list:
                    if (hasattr(item, 'item') and isinstance(item.item, dict) and
                            item.item.get('uuid') == target_uuid):

                        # Count items with higher priority
                        position = sum(1 for other in queue_list if other.priority < item.priority)
                        return {
                            'position': position,
                            'total_items': total_items,
                            'priority': item.priority,
                            'found': True
                        }

                return {'position': None, 'total_items': total_items, 'priority': None, 'found': False}

        except Exception as e:
            logger.critical(f"CRITICAL: Failed to get UUID position for {target_uuid}: {e}")
            return {'position': None, 'total_items': 0, 'priority': None, 'found': False}

    def get_all_queued_uuids(self, limit: Optional[int] = None, offset: int = 0) -> Dict[str, Any]:
        """Get all queued UUIDs with pagination"""
        try:
            with self._lock:
                queue_list = sorted(self._priority_items)  # Sort by priority
                total_items = len(queue_list)

                if total_items == 0:
                    return {'items': [], 'total_items': 0, 'returned_items': 0, 'has_more': False}

                # Apply pagination
                end_idx = min(offset + limit, total_items) if limit else total_items
                items_to_process = queue_list[offset:end_idx]

                result = []
                for position, item in enumerate(items_to_process, start=offset):
                    if (hasattr(item, 'item') and isinstance(item.item, dict) and
                            'uuid' in item.item):
                        result.append({
                            'uuid': item.item['uuid'],
                            'position': position,
                            'priority': item.priority
                        })

                return {
                    'items': result,
                    'total_items': total_items,
                    'returned_items': len(result),
                    'has_more': (offset + len(result)) < total_items
                }

        except Exception as e:
            logger.critical(f"CRITICAL: Failed to get all queued UUIDs: {e}")
            return {'items': [], 'total_items': 0, 'returned_items': 0, 'has_more': False}

    def get_queue_summary(self) -> Dict[str, Any]:
        """Get queue summary statistics"""
        try:
            with self._lock:
                queue_list = list(self._priority_items)
                total_items = len(queue_list)

                if total_items == 0:
                    return {
                        'total_items': 0, 'priority_breakdown': {},
                        'immediate_items': 0, 'clone_items': 0, 'scheduled_items': 0
                    }

                immediate_items = clone_items = scheduled_items = 0
                priority_counts = {}

                for item in queue_list:
                    priority = item.priority
                    priority_counts[priority] = priority_counts.get(priority, 0) + 1

                    if priority == 1:
                        immediate_items += 1
                    elif priority == 5:
                        clone_items += 1
                    elif priority > 100:
                        scheduled_items += 1

                return {
                    'total_items': total_items,
                    'priority_breakdown': priority_counts,
                    'immediate_items': immediate_items,
                    'clone_items': clone_items,
                    'scheduled_items': scheduled_items,
                    'min_priority': min(priority_counts.keys()) if priority_counts else None,
                    'max_priority': max(priority_counts.keys()) if priority_counts else None
                }

        except Exception as e:
            logger.critical(f"CRITICAL: Failed to get queue summary: {e}")
            return {'total_items': 0, 'priority_breakdown': {}, 'immediate_items': 0,
                    'clone_items': 0, 'scheduled_items': 0}

    # PRIVATE METHODS
    def _get_item_uuid(self, item) -> str:
        """Safely extract UUID from item for logging"""
        try:
            if hasattr(item, 'item') and isinstance(item.item, dict):
                return item.item.get('uuid', 'unknown')
        except Exception:
            pass
        return 'unknown'

    def _emit_put_signals(self, item):
        """Emit signals when item is added"""
        try:
            # Watch update signal
            if hasattr(item, 'item') and isinstance(item.item, dict) and 'uuid' in item.item:
                watch_check_update = signal('watch_check_update')
                if watch_check_update:
                    watch_check_update.send(watch_uuid=item.item['uuid'])

            # Queue length signal
            if self.queue_length_signal:
                self.queue_length_signal.send(length=self.qsize())

        except Exception as e:
            logger.critical(f"CRITICAL: Failed to emit put signals: {e}")

    def _emit_get_signals(self):
        """Emit signals when item is removed"""
        try:
            if self.queue_length_signal:
                self.queue_length_signal.send(length=self.qsize())
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to emit get signals: {e}")


class NotificationQueue:
    """
    Ultra-reliable notification queue using pure janus.

    CRITICAL DESIGN NOTE: Both sync_q and async_q are required because:
    - sync_q: Used by Flask routes, ticker threads, and other synchronous code
    - async_q: Used by async workers and coroutines

    DO NOT REMOVE EITHER INTERFACE - they bridge different execution contexts.
    See RecheckPriorityQueue docstring above for detailed explanation.

    Simple wrapper around janus with bulletproof error handling.
    """

    def __init__(self, maxsize: int = 0):
        try:
            self._janus_queue = janus.Queue(maxsize=maxsize)
            # BOTH interfaces required - see class docstring for why
            self.sync_q = self._janus_queue.sync_q    # Flask routes, threads
            self.async_q = self._janus_queue.async_q  # Async workers
            self.notification_event_signal = signal('notification_event')
            logger.debug("NotificationQueue initialized successfully")
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to initialize NotificationQueue: {e}")
            raise

    def put(self, item: Dict[str, Any], block: bool = True, timeout: Optional[float] = None):
        """Thread-safe sync put with signal emission"""
        try:
            self.sync_q.put(item, block=block, timeout=timeout)
            self._emit_notification_signal(item)
            logger.debug(f"Successfully queued notification: {item.get('uuid', 'unknown')}")
            return True
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to put notification {item.get('uuid', 'unknown')}: {e}")
            return False

    async def async_put(self, item: Dict[str, Any]):
        """Pure async put with signal emission"""
        try:
            await self.async_q.put(item)
            self._emit_notification_signal(item)
            logger.debug(f"Successfully async queued notification: {item.get('uuid', 'unknown')}")
            return True
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to async put notification {item.get('uuid', 'unknown')}: {e}")
            return False

    def get(self, block: bool = True, timeout: Optional[float] = None):
        """Thread-safe sync get"""
        try:
            return self.sync_q.get(block=block, timeout=timeout)
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to get notification: {e}")
            raise

    async def async_get(self):
        """Pure async get"""
        try:
            return await self.async_q.get()
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to async get notification: {e}")
            raise

    def qsize(self) -> int:
        """Get current queue size"""
        try:
            return self.sync_q.qsize()
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to get notification queue size: {e}")
            return 0

    def empty(self) -> bool:
        """Check if queue is empty"""
        return self.qsize() == 0

    def close(self):
        """Close the janus queue"""
        try:
            self._janus_queue.close()
            logger.debug("NotificationQueue closed successfully")
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to close NotificationQueue: {e}")

    def _emit_notification_signal(self, item: Dict[str, Any]):
        """Emit notification signal"""
        try:
            if self.notification_event_signal and isinstance(item, dict):
                watch_uuid = item.get('uuid')
                if watch_uuid:
                    self.notification_event_signal.send(watch_uuid=watch_uuid)
                else:
                    self.notification_event_signal.send()
        except Exception as e:
            logger.critical(f"CRITICAL: Failed to emit notification signal: {e}")
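The intended usage pattern is a synchronous producer (a Flask route or the ticker thread) feeding put(), with the async workers draining the same object via async_get(). A minimal self-contained sketch of that bridge, assuming janus is installed and using a PrioritizedItem-like stand-in rather than the app's real queuedWatchMetaData class:

    # Sketch only - demonstrates the sync-producer / async-consumer bridge.
    import asyncio
    import threading
    from dataclasses import dataclass, field

    from changedetectionio.queue_handlers import RecheckPriorityQueue

    @dataclass(order=True)
    class Item:                      # stand-in for queuedWatchMetaData.PrioritizedItem
        priority: int
        item: dict = field(compare=False)

    async def main():
        q = RecheckPriorityQueue()   # janus queues expect a running event loop, so build it here

        def producer():              # runs in a plain thread, like the ticker thread does
            q.put(Item(priority=5, item={'uuid': 'clone-job'}))
            q.put(Item(priority=1, item={'uuid': 'immediate-job'}))

        t = threading.Thread(target=producer)
        t.start()
        t.join()                     # both items queued before we start draining

        for _ in range(2):
            got = await q.async_get()
            print(got.priority, got.item['uuid'])   # priority 1 ('immediate-job') comes out first

    asyncio.run(main())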
@@ -29,6 +29,9 @@ class SignalHandler:
|
||||
watch_delete_signal = signal('watch_deleted')
|
||||
watch_delete_signal.connect(self.handle_deleted_signal, weak=False)
|
||||
|
||||
watch_favicon_bumped_signal = signal('watch_favicon_bump')
|
||||
watch_favicon_bumped_signal.connect(self.handle_watch_bumped_favicon_signal, weak=False)
|
||||
|
||||
# Connect to the notification_event signal
|
||||
notification_event_signal = signal('notification_event')
|
||||
notification_event_signal.connect(self.handle_notification_event, weak=False)
|
||||
@@ -37,7 +40,7 @@ class SignalHandler:
|
||||
# Create and start the queue update thread using standard threading
|
||||
import threading
|
||||
self.polling_emitter_thread = threading.Thread(
|
||||
target=self.polling_emit_running_or_queued_watches_threaded,
|
||||
target=self.polling_emit_running_or_queued_watches_threaded,
|
||||
daemon=True
|
||||
)
|
||||
self.polling_emitter_thread.start()
|
||||
@@ -69,6 +72,16 @@ class SignalHandler:
|
||||
else:
|
||||
logger.warning(f"Watch UUID {watch_uuid} not found in datastore")
|
||||
|
||||
def handle_watch_bumped_favicon_signal(self, *args, **kwargs):
|
||||
watch_uuid = kwargs.get('watch_uuid')
|
||||
if watch_uuid:
|
||||
# Emit the queue size to all connected clients
|
||||
self.socketio_instance.emit("watch_bumped_favicon", {
|
||||
"uuid": watch_uuid,
|
||||
"event_timestamp": time.time()
|
||||
})
|
||||
logger.debug(f"Watch UUID {watch_uuid} got its favicon updated")
|
||||
|
||||
def handle_deleted_signal(self, *args, **kwargs):
|
||||
watch_uuid = kwargs.get('watch_uuid')
|
||||
if watch_uuid:
|
||||
@@ -105,39 +118,38 @@ class SignalHandler:
|
||||
"watch_uuid": watch_uuid,
|
||||
"event_timestamp": time.time()
|
||||
})
|
||||
|
||||
|
||||
logger.trace(f"Socket.IO: Emitted notification_event for watch UUID {watch_uuid}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Socket.IO error in handle_notification_event: {str(e)}")
|
||||
|
||||
|
||||
def polling_emit_running_or_queued_watches_threaded(self):
|
||||
"""Threading version of polling for Windows compatibility"""
|
||||
import time
|
||||
import threading
|
||||
logger.info("Queue update thread started (threading mode)")
|
||||
|
||||
|
||||
# Import here to avoid circular imports
|
||||
from changedetectionio.flask_app import app
|
||||
from changedetectionio import worker_handler
|
||||
watch_check_update = signal('watch_check_update')
|
||||
|
||||
|
||||
# Track previous state to avoid unnecessary emissions
|
||||
previous_running_uuids = set()
|
||||
|
||||
|
||||
# Run until app shutdown - check exit flag more frequently for fast shutdown
|
||||
exit_event = getattr(app.config, 'exit', threading.Event())
|
||||
|
||||
|
||||
while not exit_event.is_set():
|
||||
try:
|
||||
# Get current running UUIDs from async workers
|
||||
running_uuids = set(worker_handler.get_running_uuids())
|
||||
|
||||
|
||||
# Only send updates for UUIDs that changed state
|
||||
newly_running = running_uuids - previous_running_uuids
|
||||
no_longer_running = previous_running_uuids - running_uuids
|
||||
|
||||
|
||||
# Send updates for newly running UUIDs (but exit fast if shutdown requested)
|
||||
for uuid in newly_running:
|
||||
if exit_event.is_set():
|
||||
@@ -146,7 +158,7 @@ class SignalHandler:
|
||||
with app.app_context():
|
||||
watch_check_update.send(app_context=app, watch_uuid=uuid)
|
||||
time.sleep(0.01) # Small yield
|
||||
|
||||
|
||||
# Send updates for UUIDs that finished processing (but exit fast if shutdown requested)
|
||||
if not exit_event.is_set():
|
||||
for uuid in no_longer_running:
|
||||
@@ -156,16 +168,16 @@ class SignalHandler:
|
||||
with app.app_context():
|
||||
watch_check_update.send(app_context=app, watch_uuid=uuid)
|
||||
time.sleep(0.01) # Small yield
|
||||
|
||||
|
||||
# Update tracking for next iteration
|
||||
previous_running_uuids = running_uuids
|
||||
|
||||
|
||||
# Sleep between polling cycles, but check exit flag every 0.5 seconds for fast shutdown
|
||||
for _ in range(20): # 20 * 0.5 = 10 seconds total
|
||||
if exit_event.is_set():
|
||||
break
|
||||
time.sleep(0.5)
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in threading polling: {str(e)}")
|
||||
# Even during error recovery, check for exit quickly
|
||||
@@ -173,11 +185,11 @@ class SignalHandler:
|
||||
if exit_event.is_set():
|
||||
break
|
||||
time.sleep(0.5)
|
||||
|
||||
|
||||
# Check if we're in pytest environment - if so, be more gentle with logging
|
||||
import sys
|
||||
in_pytest = "pytest" in sys.modules or "PYTEST_CURRENT_TEST" in os.environ
|
||||
|
||||
|
||||
if not in_pytest:
|
||||
logger.info("Queue update thread stopped (threading mode)")
|
||||
|
||||
@@ -208,20 +220,20 @@ def handle_watch_update(socketio, **kwargs):
|
||||
|
||||
watch_data = {
|
||||
'checking_now': True if watch.get('uuid') in running_uuids else False,
|
||||
'error_text': error_texts,
|
||||
'event_timestamp': time.time(),
|
||||
'fetch_time': watch.get('fetch_time'),
|
||||
'has_error': True if error_texts else False,
|
||||
'last_changed': watch.get('last_changed'),
|
||||
'last_checked': watch.get('last_checked'),
|
||||
'error_text': error_texts,
|
||||
'has_favicon': True if watch.get_favicon_filename() else False,
|
||||
'history_n': watch.history_n,
|
||||
'last_checked_text': _jinja2_filter_datetime(watch),
|
||||
'last_changed_text': timeago.format(int(watch.last_changed), time.time()) if watch.history_n >= 2 and int(watch.last_changed) > 0 else 'Not yet',
|
||||
'queued': True if watch.get('uuid') in queue_list else False,
|
||||
'paused': True if watch.get('paused') else False,
|
||||
'last_checked': watch.get('last_checked'),
|
||||
'last_checked_text': _jinja2_filter_datetime(watch),
|
||||
'notification_muted': True if watch.get('notification_muted') else False,
|
||||
'paused': True if watch.get('paused') else False,
|
||||
'queued': True if watch.get('uuid') in queue_list else False,
|
||||
'unviewed': watch.has_unviewed,
|
||||
'uuid': watch.get('uuid'),
|
||||
'event_timestamp': time.time()
|
||||
}
|
||||
|
||||
errored_count = 0
|
||||
@@ -251,15 +263,15 @@ def init_socketio(app, datastore):
|
||||
"""Initialize SocketIO with the main Flask app"""
|
||||
import platform
|
||||
import sys
|
||||
|
||||
|
||||
# Platform-specific async_mode selection for better stability
|
||||
system = platform.system().lower()
|
||||
python_version = sys.version_info
|
||||
|
||||
|
||||
# Check for SocketIO mode configuration via environment variable
|
||||
# Default is 'threading' for best cross-platform compatibility
|
||||
socketio_mode = os.getenv('SOCKETIO_MODE', 'threading').lower()
|
||||
|
||||
|
||||
if socketio_mode == 'gevent':
|
||||
# Use gevent mode (higher concurrency but platform limitations)
|
||||
try:
|
||||
@@ -277,7 +289,7 @@ def init_socketio(app, datastore):
|
||||
# Invalid mode specified, use default
|
||||
async_mode = 'threading'
|
||||
logger.warning(f"Invalid SOCKETIO_MODE='{socketio_mode}', using default {async_mode} mode for Socket.IO")
|
||||
|
||||
|
||||
# Log platform info for debugging
|
||||
logger.info(f"Platform: {system}, Python: {python_version.major}.{python_version.minor}, Socket.IO mode: {async_mode}")
|
||||
|
||||
@@ -315,7 +327,6 @@ def init_socketio(app, datastore):
|
||||
emit_flash=False
|
||||
)
|
||||
|
||||
|
||||
@socketio.on('connect')
|
||||
def handle_connect():
|
||||
"""Handle client connection"""
|
||||
@@ -393,4 +404,4 @@ def init_socketio(app, datastore):
|
||||
|
||||
logger.info("Socket.IO initialized and attached to main Flask app")
|
||||
logger.info(f"Socket.IO: Registered event handlers: {socketio.handlers if hasattr(socketio, 'handlers') else 'No handlers found'}")
|
||||
return socketio
|
||||
return socketio
|
||||
@@ -10,9 +10,15 @@ import os

JINJA2_MAX_RETURN_PAYLOAD_SIZE = 1024 * int(os.getenv("JINJA2_MAX_RETURN_PAYLOAD_SIZE_KB", 1024 * 10))


# This is used for notifications etc, so actually it's OK to send custom HTML such as <a href> etc, but it should limit what data is available.
# (Which also limits available functions that could be called)
def render(template_str, **args: t.Any) -> str:
jinja2_env = jinja2.sandbox.ImmutableSandboxedEnvironment(extensions=['jinja2_time.TimeExtension'])
output = jinja2_env.from_string(template_str).render(args)
return output[:JINJA2_MAX_RETURN_PAYLOAD_SIZE]

def render_fully_escaped(content):
env = jinja2.sandbox.ImmutableSandboxedEnvironment(autoescape=True)
template = env.from_string("{{ some_html|e }}")
return template.render(some_html=content)
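render_fully_escaped() keeps the escaping inside the same sandboxed Jinja2 environment rather than calling markupsafe directly, which is what lets the Lo-Fi error path in Watch.py emit untrusted error text safely. A quick hedged illustration of the effect:

    # Sketch only - exact entity forms are whatever Jinja2/markupsafe produce on your version.
    from changedetectionio import safe_jinja

    print(safe_jinja.render_fully_escaped('<script>alert("x")</script> fetch timed out'))
    # roughly: &lt;script&gt;alert(&#34;x&#34;)&lt;/script&gt; fetch timed out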
@@ -159,6 +159,7 @@
|
||||
// Return the current request in case it's needed
|
||||
return requests[namespace];
|
||||
};
|
||||
|
||||
})(jQuery);
|
||||
|
||||
|
||||
|
||||
@@ -48,9 +48,9 @@ $(document).ready(function () {
|
||||
// Connect to Socket.IO on the same host/port, with path from template
|
||||
const socket = io({
|
||||
path: socketio_url, // This will be the path prefix like "/app/socket.io" from the template
|
||||
transports: ['polling', 'websocket'], // Try WebSocket but fall back to polling
|
||||
reconnectionDelay: 1000,
|
||||
reconnectionAttempts: 15
|
||||
transports: ['websocket', 'polling'],
|
||||
reconnectionDelay: 3000,
|
||||
reconnectionAttempts: 25
|
||||
});
|
||||
|
||||
// Connection status logging
|
||||
@@ -98,8 +98,24 @@ $(document).ready(function () {
|
||||
console.log(`Stub handler for notification_event ${data.watch_uuid}`)
|
||||
});
|
||||
|
||||
// Listen for periodically emitted watch data
|
||||
console.log('Adding watch_update event listener');
|
||||
socket.on('watch_deleted', function (data) {
|
||||
$('tr[data-watch-uuid="' + data.uuid + '"] td').fadeOut(500, function () {
|
||||
$(this).closest('tr').remove();
|
||||
});
|
||||
});
|
||||
|
||||
// So that the favicon is only updated when the server has written the scraped favicon to disk.
|
||||
socket.on('watch_bumped_favicon', function (watch) {
|
||||
const $watchRow = $(`tr[data-watch-uuid="${watch.uuid}"]`);
|
||||
if ($watchRow.length) {
|
||||
$watchRow.addClass('has-favicon');
|
||||
// Because the event could be emitted from a process that is outside the app context, url_for() might not work.
|
||||
// Lets use url_for at template generation time to give us a PLACEHOLDER instead
|
||||
let favicon_url = favicon_baseURL.replace('/PLACEHOLDER', `/${watch.uuid}?cache=${watch.event_timestamp}`);
|
||||
console.log(`Setting favicon for UUID - ${watch.uuid} - ${favicon_url}`);
|
||||
$('img.favicon', $watchRow).attr('src', favicon_url);
|
||||
}
|
||||
})
|
||||
|
||||
socket.on('watch_update', function (data) {
|
||||
const watch = data.watch;
|
||||
@@ -110,29 +126,28 @@ $(document).ready(function () {
|
||||
console.log(`${watch.event_timestamp} - Watch update ${watch.uuid} - Checking now - ${watch.checking_now} - UUID in URL ${window.location.href.includes(watch.uuid)}`);
|
||||
console.log('Watch data:', watch);
|
||||
console.log('General stats:', general_stats);
|
||||
|
||||
|
||||
// Updating watch table rows
|
||||
const $watchRow = $('tr[data-watch-uuid="' + watch.uuid + '"]');
|
||||
console.log('Found watch row elements:', $watchRow.length);
|
||||
|
||||
|
||||
if ($watchRow.length) {
|
||||
$($watchRow).toggleClass('checking-now', watch.checking_now);
|
||||
$($watchRow).toggleClass('queued', watch.queued);
|
||||
$($watchRow).toggleClass('unviewed', watch.unviewed);
|
||||
$($watchRow).toggleClass('has-error', watch.has_error);
|
||||
$($watchRow).toggleClass('has-favicon', watch.has_favicon);
|
||||
$($watchRow).toggleClass('notification_muted', watch.notification_muted);
|
||||
$($watchRow).toggleClass('paused', watch.paused);
|
||||
$($watchRow).toggleClass('single-history', watch.history_n === 1);
|
||||
$($watchRow).toggleClass('multiple-history', watch.history_n >= 2);
|
||||
|
||||
$('td.title-col .error-text', $watchRow).html(watch.error_text)
|
||||
|
||||
$('td.last-changed', $watchRow).text(watch.last_changed_text)
|
||||
|
||||
$('td.last-checked .innertext', $watchRow).text(watch.last_checked_text)
|
||||
$('td.last-checked', $watchRow).data('timestamp', watch.last_checked).data('fetchduration', watch.fetch_time);
|
||||
$('td.last-checked', $watchRow).data('eta_complete', watch.last_checked + watch.fetch_time);
|
||||
|
||||
|
||||
console.log('Updated UI for watch:', watch.uuid);
|
||||
}
|
||||
|
||||
|
||||
@@ -16,6 +16,12 @@ $(function () {
|
||||
$('#op_extradata').val(prompt("Enter a tag name"));
|
||||
});
|
||||
|
||||
|
||||
$('.history-link').click(function (e) {
|
||||
// Incase they click 'back' in the browser, it should be removed.
|
||||
$(this).closest('tr').removeClass('unviewed');
|
||||
});
|
||||
|
||||
$('.with-share-link > *').click(function () {
|
||||
$("#copied-clipboard").remove();
|
||||
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -3,15 +3,16 @@
|
||||
"version": "0.0.3",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"watch": "node-sass -w scss -o .",
|
||||
"build": "node-sass scss -o ."
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
},
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"scripts": {
|
||||
"watch": "sass --watch scss:. --style=compressed --no-source-map",
|
||||
"build": "sass scss:. --style=compressed --no-source-map"
|
||||
},
|
||||
"author": "Leigh Morresi / Web Technologies s.r.o.",
|
||||
"license": "Apache",
|
||||
"dependencies": {
|
||||
"node-sass": "^7.0.0",
|
||||
"tar": "^6.1.9",
|
||||
"trim-newlines": "^3.0.1"
|
||||
"sass": "^1.77.8"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
@import "parts/_variables.scss";
|
||||
@use "parts/variables";
|
||||
|
||||
#diff-ui {
|
||||
|
||||
|
||||
@@ -64,17 +64,17 @@ body.proxy-check-active {
|
||||
#recommended-proxy {
|
||||
display: grid;
|
||||
gap: 2rem;
|
||||
@media (min-width: 991px) {
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
}
|
||||
padding-bottom: 1em;
|
||||
|
||||
@media (min-width: 991px) {
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
}
|
||||
|
||||
> div {
|
||||
border: 1px #aaa solid;
|
||||
border-radius: 4px;
|
||||
padding: 1em;
|
||||
}
|
||||
|
||||
padding-bottom: 1em;
|
||||
}
|
||||
|
||||
#extra-proxies-setting {
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
.watch-table {
|
||||
&.favicon-not-enabled {
|
||||
tr {
|
||||
.favicon {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tr {
|
||||
/* make the icons and the text inline-ish */
|
||||
td.inline.title-col {
|
||||
.flex-wrapper {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
td,
|
||||
th {
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
tr.has-favicon {
|
||||
&.unviewed {
|
||||
img.favicon {
|
||||
opacity: 1.0 !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.status-icons {
|
||||
white-space: nowrap;
|
||||
display: flex;
|
||||
align-items: center; /* Vertical centering */
|
||||
gap: 4px; /* Space between image and text */
|
||||
> * {
|
||||
vertical-align: middle;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.title-col {
|
||||
/* Optional, for spacing */
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
.title-wrapper {
|
||||
display: flex;
|
||||
align-items: center; /* Vertical centering */
|
||||
gap: 10px; /* Space between image and text */
|
||||
}
|
||||
|
||||
/* Make sure .title-col-inner doesn't collapse or misalign */
|
||||
.title-col-inner {
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
/* favicon styling */
|
||||
.watch-table {
|
||||
img.favicon {
|
||||
vertical-align: middle;
|
||||
max-width: 25px;
|
||||
max-height: 25px;
|
||||
height: 25px;
|
||||
padding-right: 4px;
|
||||
}
|
||||
|
||||
// Reserved for future use
|
||||
/* &.thumbnail-type-screenshot {
|
||||
tr.has-favicon {
|
||||
td.inline.title-col {
|
||||
img.thumbnail {
|
||||
background-color: #fff; !* fallback bg for SVGs without bg *!
|
||||
border-radius: 4px; !* subtle rounded corners *!
|
||||
border: 1px solid #ddd; !* light border for contrast *!
|
||||
box-shadow: 0 2px 6px rgba(0, 0, 0, 0.15); !* soft shadow *!
|
||||
filter: contrast(1.05) saturate(1.1) drop-shadow(0 0 0.5px rgba(0, 0, 0, 0.2));
|
||||
object-fit: cover; !* crop/fill if needed *!
|
||||
opacity: 0.8;
|
||||
max-width: 30px;
|
||||
max-height: 30px;
|
||||
height: 30px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}*/
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
@import "minitabs";
|
||||
@use "minitabs";
|
||||
|
||||
body.preview-text-enabled {
|
||||
|
||||
|
||||
@@ -78,6 +78,7 @@
|
||||
--color-text-watch-tag-list: rgba(231, 0, 105, 0.4);
|
||||
--color-background-new-watch-form: rgba(0, 0, 0, 0.05);
|
||||
--color-background-new-watch-input: var(--color-white);
|
||||
--color-background-new-watch-input-transparent: rgba(255, 255, 255, 0.1);
|
||||
--color-text-new-watch-input: var(--color-text);
|
||||
|
||||
--color-border-input: var(--color-grey-500);
|
||||
@@ -112,6 +113,7 @@ html[data-darkmode="true"] {
|
||||
--color-background-gradient-third: #4d2c64;
|
||||
|
||||
--color-background-new-watch-input: var(--color-grey-100);
|
||||
--color-background-new-watch-input-transparent: var(--color-grey-100);
|
||||
--color-text-new-watch-input: var(--color-text);
|
||||
--color-background-table-thead: var(--color-grey-200);
|
||||
--color-table-background: var(--color-grey-300);
|
||||
|
||||
@@ -0,0 +1,178 @@
|
||||
$grid-col-checkbox: 20px;
|
||||
$grid-col-watch: 100px;
|
||||
$grid-gap: 0.5rem;
|
||||
|
||||
|
||||
@media (max-width: 767px) {
|
||||
|
||||
/*
|
||||
Max width before this PARTICULAR table gets nasty
|
||||
This query will take effect for any screen smaller than 760px
|
||||
and also iPads specifically.
|
||||
*/
|
||||
.watch-table {
|
||||
/* make headings work on mobile */
|
||||
thead {
|
||||
display: block;
|
||||
|
||||
tr {
|
||||
th {
|
||||
display: inline-block;
|
||||
// Hide the "Last" text for smaller screens
|
||||
@media (max-width: 768px) {
|
||||
.hide-on-mobile {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.empty-cell {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.last-checked {
|
||||
margin-left: calc($grid-col-checkbox + $grid-gap);
|
||||
|
||||
> span {
|
||||
vertical-align: middle;
|
||||
}
|
||||
}
|
||||
|
||||
.last-changed {
|
||||
margin-left: calc($grid-col-checkbox + $grid-gap);
|
||||
}
|
||||
|
||||
.last-checked::before {
|
||||
color: var(--color-text);
|
||||
content: "Last Checked ";
|
||||
}
|
||||
|
||||
.last-changed::before {
|
||||
color: var(--color-text);
|
||||
content: "Last Changed ";
|
||||
}
|
||||
|
||||
/* Force table to not be like tables anymore */
|
||||
td.inline {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.pure-table td,
|
||||
.pure-table th {
|
||||
border: none;
|
||||
}
|
||||
|
||||
td {
|
||||
/* Behave like a "row" */
|
||||
border: none;
|
||||
border-bottom: 1px solid var(--color-border-watch-table-cell);
|
||||
vertical-align: middle;
|
||||
|
||||
&:before {
|
||||
/* Top/left values mimic padding */
|
||||
top: 6px;
|
||||
left: 6px;
|
||||
width: 45%;
|
||||
padding-right: 10px;
|
||||
white-space: nowrap;
|
||||
}
|
||||
}
|
||||
|
||||
&.pure-table-striped {
|
||||
tr {
|
||||
background-color: var(--color-table-background);
|
||||
}
|
||||
|
||||
tr:nth-child(2n-1) {
|
||||
background-color: var(--color-table-stripe);
|
||||
}
|
||||
|
||||
tr:nth-child(2n-1) td {
|
||||
background-color: inherit;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 767px) {
|
||||
.watch-table {
|
||||
tbody {
|
||||
tr {
|
||||
padding-bottom: 10px;
|
||||
padding-top: 10px;
|
||||
display: grid;
|
||||
grid-template-columns: $grid-col-checkbox 1fr $grid-col-watch;
|
||||
grid-template-rows: auto auto auto auto;
|
||||
gap: $grid-gap;
|
||||
|
||||
.counter-i {
|
||||
display: none;
|
||||
}
|
||||
|
||||
td.checkbox-uuid {
|
||||
display: grid;
|
||||
place-items: center;
|
||||
}
|
||||
|
||||
td.inline {
|
||||
/* display: block !important;;*/
|
||||
}
|
||||
|
||||
> td {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
> td.title-col {
|
||||
grid-column: 1 / -1;
|
||||
grid-row: 1;
|
||||
.watch-title {
|
||||
font-size: 0.92rem;
|
||||
}
|
||||
.link-spread {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
> td.last-checked {
|
||||
grid-column: 1 / -1;
|
||||
grid-row: 2;
|
||||
}
|
||||
|
||||
> td.last-changed {
|
||||
grid-column: 1 / -1;
|
||||
grid-row: 3;
|
||||
}
|
||||
|
||||
> td.checkbox-uuid {
|
||||
grid-column: 1;
|
||||
grid-row: 4;
|
||||
}
|
||||
|
||||
> td.buttons {
|
||||
grid-column: 2;
|
||||
grid-row: 4;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: flex-start;
|
||||
}
|
||||
|
||||
> td.watch-controls {
|
||||
grid-column: 3;
|
||||
grid-row: 4;
|
||||
display: grid;
|
||||
place-items: center;
|
||||
|
||||
a img {
|
||||
padding: 10px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.pure-table td {
|
||||
padding: 3px !important;
|
||||
}
|
||||
}
|
||||
@@ -7,6 +7,7 @@
|
||||
&.unviewed {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
color: var(--color-watch-table-row-text);
|
||||
}
|
||||
|
||||
@@ -17,11 +18,13 @@
|
||||
&.title-col {
|
||||
word-break: break-all;
|
||||
white-space: normal;
|
||||
a::after {
|
||||
content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
|
||||
margin: 0 3px 0 5px;
|
||||
}
|
||||
}
|
||||
|
||||
a.external::after {
|
||||
content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
|
||||
margin: 0 3px 0 5px;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@@ -46,17 +49,17 @@
|
||||
/* Row with 'checking-now' */
|
||||
tr.checking-now {
|
||||
td:first-child {
|
||||
position: relative;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
td:first-child::before {
|
||||
content: "";
|
||||
position: absolute;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
width: 3px;
|
||||
background-color: #293eff;
|
||||
content: "";
|
||||
position: absolute;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
width: 3px;
|
||||
background-color: #293eff;
|
||||
}
|
||||
|
||||
td.last-checked {
|
||||
@@ -107,6 +110,7 @@
|
||||
|
||||
tr.has-error {
|
||||
color: var(--color-watch-table-error);
|
||||
|
||||
.error-text {
|
||||
display: block !important;
|
||||
}
|
||||
@@ -117,6 +121,7 @@
|
||||
display: inline-block !important;
|
||||
}
|
||||
}
|
||||
|
||||
tr.multiple-history {
|
||||
a.history-link {
|
||||
display: inline-block !important;
|
||||
@@ -124,5 +129,3 @@
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -2,21 +2,25 @@
|
||||
* -- BASE STYLES --
|
||||
*/
|
||||
|
||||
@import "parts/_arrows";
|
||||
@import "parts/_browser-steps";
|
||||
@import "parts/_extra_proxies";
|
||||
@import "parts/_extra_browsers";
|
||||
@import "parts/_pagination";
|
||||
@import "parts/_spinners";
|
||||
@import "parts/_variables";
|
||||
@import "parts/_darkmode";
|
||||
@import "parts/_menu";
|
||||
@import "parts/_love";
|
||||
@import "parts/preview_text_filter";
|
||||
@import "parts/_watch_table";
|
||||
@import "parts/_edit";
|
||||
@import "parts/_conditions_table";
|
||||
@import "parts/_socket";
|
||||
@use "parts/variables";
|
||||
@use "parts/arrows";
|
||||
@use "parts/browser-steps";
|
||||
@use "parts/extra_proxies";
|
||||
@use "parts/extra_browsers";
|
||||
@use "parts/pagination";
|
||||
@use "parts/spinners";
|
||||
@use "parts/darkmode";
|
||||
@use "parts/menu";
|
||||
@use "parts/love";
|
||||
@use "parts/preview_text_filter";
|
||||
@use "parts/watch_table";
|
||||
@use "parts/watch_table-mobile";
|
||||
@use "parts/edit";
|
||||
@use "parts/conditions_table";
|
||||
@use "parts/lister_extra";
|
||||
@use "parts/socket";
|
||||
@use "parts/visualselector";
|
||||
|
||||
|
||||
body {
|
||||
color: var(--color-text);
|
||||
@@ -184,8 +188,15 @@ code {
|
||||
@extend .inline-tag;
|
||||
}
|
||||
|
||||
@media (min-width: 768px) {
|
||||
.box {
|
||||
margin: 0 1em !important;
|
||||
}
|
||||
}
|
||||
|
||||
.box {
|
||||
max-width: 80%;
|
||||
max-width: 100%;
|
||||
margin: 0 0.3em;
|
||||
flex-direction: column;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
@@ -279,7 +290,7 @@ a.pure-button-selected {
|
||||
font-size: 65%;
|
||||
border-bottom-left-radius: initial;
|
||||
border-bottom-right-radius: initial;
|
||||
|
||||
margin-right: 4px;
|
||||
&.active {
|
||||
background: var(--color-background-button-tag-active);
|
||||
font-weight: bold;
|
||||
@@ -372,11 +383,32 @@ label {
|
||||
}
|
||||
}
|
||||
|
||||
// Some field colouring for transperant field
|
||||
.pure-form input[type=text].transparent-field {
  background-color: var(--color-background-new-watch-input-transparent) !important;
  color: var(--color-white) !important;
  border: 1px solid rgba(255, 255, 255, 0.2) !important;
  box-shadow: none !important;
  -webkit-box-shadow: none !important;
  &::placeholder {
    opacity: 0.5;
    color: rgba(255, 255, 255, 0.7);
    font-weight: lighter;
  }
}

#new-watch-form {
  background: var(--color-background-new-watch-form);
  padding: 1em;
  border-radius: 10px;
  margin-bottom: 1em;
  max-width: 100%;

  #url {
    &::placeholder {
      font-weight: bold;
    }
  }

  input {
    display: inline-block;
@@ -397,12 +429,13 @@ label {
    font-weight: bold;
  }

  #watch-add-wrapper-zone {

  #watch-add-wrapper-zone {
    @media only screen and (min-width: 760px) {
      display: flex;
      gap: 0.3rem;
      flex-direction: row;
      min-width: 70vw;
    }
    /* URL field grows always, other stay static in width */
    > span {
@@ -424,6 +457,22 @@ label {
      }
    }
  }

  #watch-group-tag {
    font-size: 0.9rem;
    padding: 0.3rem;
    display: flex;
    align-items: center;
    gap: 0.5rem;
    color: var(--color-white);
    label, input {
      margin: 0;
    }

    input {
      flex: 1;
    }
  }
}


@@ -620,10 +669,6 @@ footer {

@media only screen and (max-width: 760px),
(min-device-width: 768px) and (max-device-width: 1024px) {
  .box {
    max-width: 95%
  }

  .edit-form {
    padding: 0.5em;
    margin: 0;
@@ -659,114 +704,6 @@ footer {
    width: 100%;
  }

  /*
  Max width before this PARTICULAR table gets nasty
  This query will take effect for any screen smaller than 760px
  and also iPads specifically.
  */
  .watch-table {
    /* make headings work on mobile */
    thead {
      display: block;
      tr {
        th {
          display: inline-block;
          // Hide the "Last" text for smaller screens
          @media (max-width: 768px) {
            .hide-on-mobile {
              display: none;
            }
          }
        }
      }
      .empty-cell {
        display: none;
      }
    }

    /* Force table to not be like tables anymore */
    tbody {
      td,
      tr {
        display: block;
      }
    }

    tbody {
      tr {
        display: flex;
        flex-wrap: wrap;

        // The third child of each row will take up the remaining space
        // This is useful for the URL column, which should expand to fill the remaining space
        :nth-child(3) {
          flex-grow: 1;
        }
        // The last three children (from the end) of each row will take up the full width
        // This is useful for the "Last Checked", "Last Changed", and the action buttons columns, which should each take up the full width
        :nth-last-child(-n+3) {
          flex-basis: 100%;
        }
      }
    }

    .last-checked {
      >span {
        vertical-align: middle;
      }
    }

    .last-checked::before {
      color: var(--color-last-checked);
      content: "Last Checked ";
    }

    .last-changed::before {
      color: var(--color-last-checked);
      content: "Last Changed ";
    }

    /* Force table to not be like tables anymore */
    td.inline {
      display: inline-block;
    }

    .pure-table td,
    .pure-table th {
      border: none;
    }

    td {
      /* Behave like a "row" */
      border: none;
      border-bottom: 1px solid var(--color-border-watch-table-cell);
      vertical-align: middle;

      &:before {
        /* Top/left values mimic padding */
        top: 6px;
        left: 6px;
        width: 45%;
        padding-right: 10px;
        white-space: nowrap;
      }
    }

    &.pure-table-striped {
      tr {
        background-color: var(--color-table-background);
      }

      tr:nth-child(2n-1) {
        background-color: var(--color-table-stripe);
      }

      tr:nth-child(2n-1) td {
        background-color: inherit;
      }
    }

  }
}

.pure-table {
@@ -1021,8 +958,6 @@ ul {
  }
}

@import "parts/_visualselector";

#webdriver_delay {
  width: 5em;
}
@@ -1140,19 +1075,23 @@ ul {


#quick-watch-processor-type {
  color: #fff;
  ul {
    padding: 0.3rem;

  ul#processor {
    color: #fff;
    padding-left: 0px;
    li {
      list-style: none;
      font-size: 0.8rem;
      > * {
        display: inline-block;
      }
      font-size: 0.9rem;
      display: grid;
      grid-template-columns: auto 1fr;
      align-items: center;
      gap: 0.5rem;
      margin-bottom: 0.5rem;
    }
  }

  label, input {
    padding: 0;
    margin: 0;
  }
}

.restock-label {

File diff suppressed because one or more lines are too long
@@ -13,6 +13,7 @@ import json
import os
import re
import secrets
import sys
import threading
import time
import uuid as uuid_builder
@@ -45,7 +46,7 @@ class ChangeDetectionStore:
        # logging.basicConfig(filename='/dev/stdout', level=logging.INFO)
        self.__data = App.model()
        self.datastore_path = datastore_path
        self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
        self.json_store_path = os.path.join(self.datastore_path, "url-watches.json")
        logger.info(f"Datastore path is '{self.json_store_path}'")
        self.needs_write = False
        self.start_time = time.time()
@@ -118,14 +119,12 @@ class ChangeDetectionStore:
            test_list = self.proxy_list

        # Helper to remove password protection
        password_reset_lockfile = "{}/removepassword.lock".format(self.datastore_path)
        password_reset_lockfile = os.path.join(self.datastore_path, "removepassword.lock")
        if path.isfile(password_reset_lockfile):
            self.__data['settings']['application']['password'] = False
            unlink(password_reset_lockfile)

        if not 'app_guid' in self.__data:
            import os
            import sys
            if "pytest" in sys.modules or "PYTEST_CURRENT_TEST" in os.environ:
                self.__data['app_guid'] = "test-" + str(uuid_builder.uuid4())
            else:
@@ -386,9 +385,9 @@ class ChangeDetectionStore:
        return new_uuid

    def visualselector_data_is_ready(self, watch_uuid):
        output_path = "{}/{}".format(self.datastore_path, watch_uuid)
        screenshot_filename = "{}/last-screenshot.png".format(output_path)
        elements_index_filename = "{}/elements.deflate".format(output_path)
        output_path = os.path.join(self.datastore_path, watch_uuid)
        screenshot_filename = os.path.join(output_path, "last-screenshot.png")
        elements_index_filename = os.path.join(output_path, "elements.deflate")
        if path.isfile(screenshot_filename) and path.isfile(elements_index_filename) :
            return True

@@ -412,11 +411,7 @@ class ChangeDetectionStore:
            # system was out of memory, out of RAM etc
            with open(self.json_store_path+".tmp", 'w') as json_file:
                # Use compact JSON in production for better performance
                debug_mode = os.environ.get('CHANGEDETECTION_DEBUG', 'false').lower() == 'true'
                if debug_mode:
                    json.dump(data, json_file, indent=4)
                else:
                    json.dump(data, json_file, separators=(',', ':'))
                json.dump(data, json_file, indent=2)
            os.replace(self.json_store_path+".tmp", self.json_store_path)
        except Exception as e:
            logger.error(f"Error writing JSON!! (Main JSON file save was skipped) : {str(e)}")
@@ -478,7 +473,7 @@ class ChangeDetectionStore:

        # Load from external config file
        if path.isfile(proxy_list_file):
            with open("{}/proxies.json".format(self.datastore_path)) as f:
            with open(os.path.join(self.datastore_path, "proxies.json")) as f:
                proxy_list = json.load(f)

        # Mapping from UI config if available
@@ -736,10 +731,10 @@ class ChangeDetectionStore:
            logger.critical(f"Applying update_{update_n}")
            # Wont exist on fresh installs
            if os.path.exists(self.json_store_path):
                shutil.copyfile(self.json_store_path, self.datastore_path+"/url-watches-before-{}.json".format(update_n))
                shutil.copyfile(self.json_store_path, os.path.join(self.datastore_path, f"url-watches-before-{update_n}.json"))

            try:
                update_method = getattr(self, "update_{}".format(update_n))()
                update_method = getattr(self, f"update_{update_n}")()
            except Exception as e:
                logger.error(f"Error while trying update_{update_n}")
                logger.error(e)

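Both sides of the JSON-save hunk above keep the same pattern: dump to a temporary file next to url-watches.json, then atomically swap it into place with os.replace(). A minimal standalone sketch of that pattern follows; the CHANGEDETECTION_DEBUG toggle and the compact separators are taken from the diff, while the function name and signature are purely illustrative, not the project's actual code.

import json
import os

def atomic_json_save(data: dict, json_store_path: str) -> None:
    """Write to a temp file, then atomically replace the real store file."""
    # Pretty-print only when debugging; compact separators keep the file small and quick to write
    debug_mode = os.environ.get('CHANGEDETECTION_DEBUG', 'false').lower() == 'true'
    tmp_path = json_store_path + ".tmp"
    with open(tmp_path, 'w') as json_file:
        if debug_mode:
            json.dump(data, json_file, indent=4)
        else:
            json.dump(data, json_file, separators=(',', ':'))
    # os.replace() is atomic on the same filesystem, so a crash mid-write
    # leaves the previous file intact rather than a truncated store
    os.replace(tmp_path, json_store_path)
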
@@ -74,7 +74,7 @@
    </tr>
    <tr>
        <td><code>{{ '{{watch_tag}}' }}</code></td>
        <td>The watch label / tag</td>
        <td>The watch group / tag</td>
    </tr>
    <tr>
        <td><code>{{ '{{preview_url}}' }}</code></td>

@@ -2,19 +2,24 @@

import time
from flask import url_for
import os
from ..util import live_server_setup, wait_for_all_checks
import logging


# Requires playwright to be installed
def test_fetch_webdriver_content(client, live_server, measure_memory_usage):
    # live_server_setup(live_server) # Setup on conftest per function
    # live_server_setup(live_server) # Setup on conftest per function

    #####################
    res = client.post(
        url_for("settings.settings_page"),
        data={"application-empty_pages_are_a_change": "",
              "requests-time_between_check-minutes": 180,
              'application-fetch_backend': "html_webdriver"},
        data={
            "application-empty_pages_are_a_change": "",
            "requests-time_between_check-minutes": 180,
            'application-fetch_backend': "html_webdriver",
            'application-ui-favicons_enabled': "y",
        },
        follow_redirects=True
    )

@@ -30,11 +35,51 @@ def test_fetch_webdriver_content(client, live_server, measure_memory_usage):
    assert b"1 Imported" in res.data
    wait_for_all_checks(client)


    res = client.get(
        url_for("ui.ui_views.preview_page", uuid="first"),
        follow_redirects=True
    )
    logging.getLogger().info("Looking for correct fetched HTML (text) from server")

    assert b'cool it works' in res.data

    # Favicon scraper check, favicon only so far is fetched when in browser mode (not requests mode)
    if os.getenv("PLAYWRIGHT_DRIVER_URL"):
        uuid = next(iter(live_server.app.config['DATASTORE'].data['watching']))
        res = client.get(
            url_for("watchlist.index"),
        )
        # The UI can access it here
        assert f'src="/static/favicon/{uuid}'.encode('utf8') in res.data

        # Attempt to fetch it, make sure that works
        res = client.get(url_for('static_content', group='favicon', filename=uuid))
        assert res.status_code == 200
        assert len(res.data) > 10

        # Check the API also returns it
        api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')
        res = client.get(
            url_for("watchfavicon", uuid=uuid),
            headers={'x-api-key': api_key}
        )
        assert res.status_code == 200
        assert len(res.data) > 10

    ##################### disable favicons check
    res = client.post(
        url_for("settings.settings_page"),
        data={
            "requests-time_between_check-minutes": 180,
            'application-ui-favicons_enabled': "",
            "application-empty_pages_are_a_change": "",
        },
        follow_redirects=True
    )

    assert b"Settings updated." in res.data

    res = client.get(
        url_for("watchlist.index"),
    )
    # The UI can access it here
    assert f'src="/static/favicon'.encode('utf8') not in res.data

@@ -1,72 +0,0 @@
import asyncio
import socketio
from aiohttp import web

SOCKETIO_URL = 'ws://localhost.localdomain:5005'
SOCKETIO_PATH = "/socket.io"
NUM_CLIENTS = 1

clients = []
shutdown_event = asyncio.Event()

class WatchClient:
    def __init__(self, client_id: int):
        self.client_id = client_id
        self.i_got_watch_update_event = False
        self.sio = socketio.AsyncClient(reconnection_attempts=50, reconnection_delay=1)

        @self.sio.event
        async def connect():
            print(f"[Client {self.client_id}] Connected")

        @self.sio.event
        async def disconnect():
            print(f"[Client {self.client_id}] Disconnected")

        @self.sio.on("watch_update")
        async def on_watch_update(watch):
            self.i_got_watch_update_event = True
            print(f"[Client {self.client_id}] Received update: {watch}")

    async def run(self):
        try:
            await self.sio.connect(SOCKETIO_URL, socketio_path=SOCKETIO_PATH, transports=["websocket", "polling"])
            await self.sio.wait()
        except Exception as e:
            print(f"[Client {self.client_id}] Connection error: {e}")

async def handle_check(request):
    all_received = all(c.i_got_watch_update_event for c in clients)
    result = "yes" if all_received else "no"
    print(f"Received HTTP check — returning '{result}'")
    shutdown_event.set()  # Signal shutdown
    return web.Response(text=result)

async def start_http_server():
    app = web.Application()
    app.add_routes([web.get('/did_all_clients_get_watch_update', handle_check)])
    runner = web.AppRunner(app)
    await runner.setup()
    site = web.TCPSite(runner, '0.0.0.0', 6666)
    await site.start()

async def main():
    #await start_http_server()

    for i in range(NUM_CLIENTS):
        client = WatchClient(i)
        clients.append(client)
        asyncio.create_task(client.run())

    await shutdown_event.wait()

    print("Shutting down...")
    # Graceful disconnect
    for c in clients:
        await c.sio.disconnect()

if __name__ == "__main__":
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("Interrupted")

@@ -292,9 +292,7 @@ def test_access_denied(client, live_server, measure_memory_usage):

def test_api_watch_PUT_update(client, live_server, measure_memory_usage):

    api_key = live_server.app.config['DATASTORE'].data['settings']['application'].get('api_access_token')

    # Create a watch
    set_original_response()
    test_url = url_for('test_endpoint', _external=True)
@@ -302,14 +300,27 @@ def test_api_watch_PUT_update(client, live_server, measure_memory_usage):
    # Create new
    res = client.post(
        url_for("createwatch"),
        data=json.dumps({"url": test_url, 'tag': "One, Two", "title": "My test URL", 'headers': {'cookie': 'yum'} }),
        data=json.dumps({"url": test_url,
                         'tag': "One, Two",
                         "title": "My test URL",
                         'headers': {'cookie': 'yum'},
                         "conditions": [
                             {
                                 "field": "page_filtered_text",
                                 "operator": "contains_regex",
                                 "value": "."  # contains anything
                             }
                         ],
                         "conditions_match_logic": "ALL"
                         }
                        ),
        headers={'content-type': 'application/json', 'x-api-key': api_key},
        follow_redirects=True
    )

    assert res.status_code == 201


    wait_for_all_checks(client)
    # Get a listing, it will be the first one
    res = client.get(
        url_for("createwatch"),

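Outside the test client, the same create-with-conditions call can be made against a running instance's REST API. A rough sketch using the requests library; the base URL and the /api/v1/watch path are assumptions about a typical local install, while the x-api-key header and the payload shape come straight from the test above.

import requests

API_KEY = "your-api-key-here"          # from the Settings page of your instance (assumption)
BASE = "http://localhost:5000/api/v1"  # assumed base URL of a local install

payload = {
    "url": "https://example.com",
    "tag": "One, Two",
    "title": "My test URL",
    "headers": {"cookie": "yum"},
    # Same condition shape as the test: match when the filtered page text contains anything
    "conditions": [
        {"field": "page_filtered_text", "operator": "contains_regex", "value": "."}
    ],
    "conditions_match_logic": "ALL",
}

res = requests.post(f"{BASE}/watch", json=payload, headers={"x-api-key": API_KEY})
print(res.status_code)  # 201 on success, as asserted in the test
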
@@ -4,6 +4,8 @@ import time

from flask import url_for
from .util import live_server_setup, wait_for_all_checks
from ..model import CONDITIONS_MATCH_LOGIC_DEFAULT


def set_original_response(number="50"):
    test_return_data = f"""<html>
@@ -76,7 +78,7 @@ def test_conditions_with_text_and_number(client, live_server):
            "fetch_backend": "html_requests",
            "include_filters": ".number-container",
            "title": "Number AND Text Condition Test",
            "conditions_match_logic": "ALL",  # ALL = AND logic
            "conditions_match_logic": CONDITIONS_MATCH_LOGIC_DEFAULT,  # ALL = AND logic
            "conditions-0-operator": "in",
            "conditions-0-field": "page_filtered_text",
            "conditions-0-value": "5",
@@ -283,7 +285,7 @@ def test_lev_conditions_plugin(client, live_server, measure_memory_usage):
        data={
            "url": test_url,
            "fetch_backend": "html_requests",
            "conditions_match_logic": "ALL",  # ALL = AND logic
            "conditions_match_logic": CONDITIONS_MATCH_LOGIC_DEFAULT,  # ALL = AND logic
            "conditions-0-field": "levenshtein_ratio",
            "conditions-0-operator": "<",
            "conditions-0-value": "0.8"  # needs to be more of a diff to trigger a change

@@ -46,7 +46,7 @@ def test_check_extract_text_from_diff(client, live_server, measure_memory_usage)
        follow_redirects=False
    )

    assert b'Nothing matches that RegEx' not in res.data
    assert b'No matches found while scanning all of the watch history for that RegEx.' not in res.data
    assert res.content_type == 'text/csv'

    # Read the csv reply as stringio

@@ -236,39 +236,41 @@ def test_group_tag_notification(client, live_server, measure_memory_usage):
    assert b'Deleted' in res.data

def test_limit_tag_ui(client, live_server, measure_memory_usage):


    test_url = url_for('test_endpoint', _external=True)
    urls=[]
    test_url = url_for('test_random_content_endpoint', _external=True)

    for i in range(20):
        urls.append(test_url+"?x="+str(i)+" test-tag")

    for i in range(20):
        urls.append(test_url+"?non-grouped="+str(i))

    res = client.post(
    # A space can label the tag, only the first one will have a tag
    client.post(
        url_for("imports.import_page"),
        data={"urls": "\r\n".join(urls)},
        data={"urls": f"{test_url} test-tag\r\n{test_url}"},
        follow_redirects=True
    )

    assert b"40 Imported" in res.data
    tag_uuid = get_UUID_for_tag_name(client, name="test-tag")
    assert tag_uuid

    res = client.get(url_for("watchlist.index"))
    assert b'test-tag' in res.data
    client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)
    client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)

    # All should be here
    assert res.data.count(b'processor-text_json_diff') == 40
    # Should be both unviewed
    res = client.get(url_for("watchlist.index"))
    assert res.data.count(b' unviewed ') == 2

    tag_uuid = get_UUID_for_tag_name(client, name="test-tag")

    res = client.get(url_for("watchlist.index", tag=tag_uuid))
    # Now we recheck only the tag
    client.get(url_for('ui.mark_all_viewed', tag=tag_uuid), follow_redirects=True)
    wait_for_all_checks(client)

    with open('/tmp/fuck.html', 'wb') as f:
        f.write(res.data)
    # Should be only 1 unviewed
    res = client.get(url_for("watchlist.index"))
    assert res.data.count(b' unviewed ') == 1


    # Just a subset should be here
    assert b'test-tag' in res.data
    assert res.data.count(b'processor-text_json_diff') == 20
    assert b"object at" not in res.data
    res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data
    res = client.get(url_for("tags.delete_all"), follow_redirects=True)

@@ -79,3 +79,48 @@ def test_consistent_history(client, live_server, measure_memory_usage):
    json_db_file = os.path.join(live_server.app.config['DATASTORE'].datastore_path, 'url-watches.json')
    with open(json_db_file, 'r') as f:
        assert '"default"' not in f.read(), "'default' probably shouldnt be here, it came from when the 'default' Watch vars were accidently being saved"


def test_check_text_history_view(client, live_server):

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write("<html>test-one</html>")

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("imports.import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    # Give the thread time to pick it up
    wait_for_all_checks(client)

    # Set second version, Make a change
    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write("<html>test-two</html>")

    client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)

    res = client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
    assert b'test-one' in res.data
    assert b'test-two' in res.data

    # Set third version, Make a change
    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write("<html>test-three</html>")

    client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)

    # It should remember the last viewed time, so the first difference is not shown
    res = client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
    assert b'test-three' in res.data
    assert b'test-two' in res.data
    assert b'test-one' not in res.data

    res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

@@ -419,13 +419,20 @@ def check_json_ext_filter(json_filter, client, live_server):
    res = client.get(url_for("watchlist.index"))
    assert b'unviewed' in res.data

    res = client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
    res = client.get(url_for("ui.ui_views.preview_page", uuid="first"))

    # We should never see 'ForSale' because we are selecting on 'Sold' in the rule,
    # But we should know it triggered ('unviewed' assert above)
    assert b'ForSale' not in res.data
    assert b'Sold' in res.data


    # And the difference should have both?

    res = client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
    assert b'ForSale' in res.data
    assert b'Sold' in res.data

    res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

@@ -5,8 +5,22 @@ from .util import live_server_setup, wait_for_all_checks
from .. import strtobool


# def test_setup(client, live_server, measure_memory_usage):
# live_server_setup(live_server) # Setup on conftest per function
def set_original_response():
    test_return_data = """<html>
       <head><title>head title</title></head>
       <body>
       Some initial text<br>
       <p>Which is across multiple lines</p>
       <br>
       So let's see what happens.  <br>
       <span class="foobar-detection" style='display:none'></span>
       </body>
     </html>
    """

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(test_return_data)
    return None

def test_bad_access(client, live_server, measure_memory_usage):

@@ -118,3 +132,33 @@ def test_xss(client, live_server, measure_memory_usage):
    assert b"<img src=x onerror=alert(" not in res.data
    assert b"<img" in res.data


def test_xss_watch_last_error(client, live_server, measure_memory_usage):
    set_original_response()
    # Add our URL to the import page
    res = client.post(
        url_for("imports.import_page"),
        data={"urls": url_for('test_endpoint', _external=True)},
        follow_redirects=True
    )

    assert b"1 Imported" in res.data

    wait_for_all_checks(client)
    res = client.post(
        url_for("ui.ui_edit.edit_page", uuid="first"),
        data={
            "include_filters": '<a href="https://foobar"></a><script>alert(123);</script>',
            "url": url_for('test_endpoint', _external=True),
            'fetch_backend': "html_requests"
        },
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    wait_for_all_checks(client)
    res = client.get(url_for("watchlist.index"))

    assert b"<script>alert(123);</script>" not in res.data  # this text should be there
    assert b'&lt;a href=&#34;https://foobar&#34;&gt;&lt;/a&gt;&lt;script&gt;alert(123);&lt;/script&gt;' in res.data
    assert b"https://foobar" in res.data  # this text should be there

@@ -1,4 +1,5 @@
from changedetectionio.conditions import execute_ruleset_against_all_plugins
from changedetectionio.model import CONDITIONS_MATCH_LOGIC_DEFAULT
from changedetectionio.store import ChangeDetectionStore
import shutil
import tempfile
@@ -59,7 +60,7 @@ class TestTriggerConditions(unittest.TestCase):

        self.store.data['watching'][self.watch_uuid].update(
            {
                "conditions_match_logic": "ALL",
                "conditions_match_logic": CONDITIONS_MATCH_LOGIC_DEFAULT,
                "conditions": [
                    {"operator": ">=", "field": "extracted_number", "value": "10"},
                    {"operator": "<=", "field": "extracted_number", "value": "5000"},

@@ -51,6 +51,9 @@ class TestJinja2SSTI(unittest.TestCase):
        for attempt in attempt_list:
            self.assertEqual(len(safe_jinja.render(attempt)), 0, f"string test '{attempt}' is correctly empty")

    def test_jinja2_escaped_html(self):
        x = safe_jinja.render_fully_escaped('woo <a href="https://google.com">dfdfd</a>')
        self.assertEqual(x, "woo &lt;a href=&#34;https://google.com&#34;&gt;dfdfd&lt;/a&gt;")


if __name__ == '__main__':

@@ -188,15 +188,54 @@ def is_watch_running(watch_uuid):


def queue_item_async_safe(update_q, item):
    """Queue an item for async queue processing"""
    if async_loop and not async_loop.is_closed():
    """Bulletproof queue operation with comprehensive error handling"""
    item_uuid = 'unknown'

    try:
        # Safely extract UUID for logging
        if hasattr(item, 'item') and isinstance(item.item, dict):
            item_uuid = item.item.get('uuid', 'unknown')
    except Exception as uuid_e:
        logger.critical(f"CRITICAL: Failed to extract UUID from queue item: {uuid_e}")

    # Validate inputs
    if not update_q:
        logger.critical(f"CRITICAL: Queue is None/invalid for item {item_uuid}")
        return False

    if not item:
        logger.critical(f"CRITICAL: Item is None/invalid")
        return False

    # Attempt queue operation with multiple fallbacks
    try:
        # Primary: Use sync interface (thread-safe)
        success = update_q.put(item, block=True, timeout=5.0)
        if success is False:  # Explicit False return means failure
            logger.critical(f"CRITICAL: Queue.put() returned False for item {item_uuid}")
            return False

        logger.debug(f"Successfully queued item: {item_uuid}")
        return True

    except Exception as e:
        logger.critical(f"CRITICAL: Exception during queue operation for item {item_uuid}: {type(e).__name__}: {e}")

        # Secondary: Attempt queue health check
        try:
            # For async queue, schedule the put operation
            asyncio.run_coroutine_threadsafe(update_q.put(item), async_loop)
        except RuntimeError as e:
            logger.error(f"Failed to queue item: {e}")
    else:
        logger.error("Async loop not available or closed for queueing item")
            queue_size = update_q.qsize()
            is_empty = update_q.empty()
            logger.critical(f"CRITICAL: Queue health - size: {queue_size}, empty: {is_empty}")
        except Exception as health_e:
            logger.critical(f"CRITICAL: Queue health check failed: {health_e}")

        # Log queue type for debugging
        try:
            logger.critical(f"CRITICAL: Queue type: {type(update_q)}, has sync_q: {hasattr(update_q, 'sync_q')}")
        except Exception:
            logger.critical(f"CRITICAL: Cannot determine queue type")

    return False


def shutdown_workers():

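The hasattr(update_q, 'sync_q') probe above is the tell-tale of a janus queue, which requirements.txt (further down in this diff) adds as a "thread-safe async/sync queue bridge". As background only, here is a minimal self-contained sketch of how such a queue lets a plain worker thread and asyncio code share one queue; it is illustrative and not the project's actual wiring.

import asyncio
import threading
import janus

def producer(sync_q):
    # Runs in an ordinary worker thread; a blocking put() is fine here
    for n in range(3):
        sync_q.put(f"item-{n}")
    sync_q.put(None)  # sentinel to stop the consumer

async def consumer(async_q):
    # Runs on the event loop; awaiting get() never blocks the loop
    while (item := await async_q.get()) is not None:
        print("got", item)

async def main():
    queue = janus.Queue()  # must be created while the event loop is running
    threading.Thread(target=producer, args=(queue.sync_q,), daemon=True).start()
    await consumer(queue.async_q)
    queue.close()
    await queue.wait_closed()

asyncio.run(main())

The same object therefore exposes a blocking queue.Queue-like interface (sync_q) to threads and an asyncio.Queue-like interface (async_q) to coroutines, which is what makes a put-with-timeout from a thread, as in the diff above, possible without touching the event loop directly.
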
@@ -66,9 +66,22 @@ services:
      # A valid timezone name to run as (for scheduling watch checking) see https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
      # - TZ=America/Los_Angeles
      #
      # Text processing locale, en_US.UTF-8 used by default unless defined as something else here, UTF-8 should cover 99.99% of cases.
      # - LC_ALL=en_US.UTF-8
      #
      # Maximum height of screenshots, default is 16000 px, screenshots will be clipped to this if exceeded.
      # RAM usage will be higher if you increase this.
      # - SCREENSHOT_MAX_HEIGHT=16000
      #
      # HTTPS SSL mode for the webserver: set both of these to enable it (leave them unset to disable). You may need to volume mount these files also.
      # ./cert.pem:/app/cert.pem and ./privkey.pem:/app/privkey.pem
      # - SSL_CERT_FILE=cert.pem
      # - SSL_PRIVKEY_FILE=privkey.pem
      #
      # LISTEN_HOST / "host", Same as -h
      # - LISTEN_HOST=::
      # - LISTEN_HOST=0.0.0.0


    # Comment out ports: when using behind a reverse proxy, enable networks: etc.
    ports:

Binary file not shown. (image changed: 171 KiB before, 200 KiB after)
@@ -7,6 +7,7 @@ flask-paginate
flask_expects_json~=1.7
flask_restful
flask_cors  # For the Chrome extension to operate
janus  # Thread-safe async/sync queue bridge
flask_wtf~=1.2
flask~=2.3
flask-socketio~=5.5.1
@@ -117,6 +118,9 @@ price-parser

# flask_socket_io - incorrect package name, already have flask-socketio above

# So far for detecting correct favicon type, but for other things in the future
python-magic

# Scheduler - Windows seemed to miss a lot of default timezone info (even "UTC" !)
tzdata

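The python-magic entry above only names its use case (detecting the correct favicon type). For readers who have not used the library, a tiny illustrative sketch of MIME sniffing on fetched favicon bytes; the function name and the example file are made up for this sketch, and python-magic needs the libmagic system library to be installed.

import magic  # python-magic, listed above

def guess_favicon_type(blob: bytes) -> str:
    """Sniff the MIME type of fetched favicon bytes so a sensible
    Content-Type or file extension can be chosen."""
    return magic.from_buffer(blob, mime=True)  # e.g. 'image/png' or 'image/vnd.microsoft.icon'

# Usage sketch:
# with open("favicon.bin", "rb") as f:
#     print(guess_favicon_type(f.read(2048)))
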